alibabacloud-sls20201230 5.9.0__py3-none-any.whl → 5.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +1754 -156
- alibabacloud_sls20201230/models.py +1662 -275
- {alibabacloud_sls20201230-5.9.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/METADATA +2 -2
- alibabacloud_sls20201230-5.10.0.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.9.0.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.9.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.9.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.9.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/top_level.txt +0 -0
@@ -877,6 +877,261 @@ class Alert(TeaModel):
         return self


+class AzureBlobIngestionConfigurationSource(TeaModel):
+    def __init__(
+        self,
+        account_key: str = None,
+        account_name: str = None,
+        compression_codec: str = None,
+        container_name: str = None,
+        encoding: str = None,
+        end_time: int = None,
+        format: Dict[str, Any] = None,
+        interval: str = None,
+        pattern: str = None,
+        prefix: str = None,
+        processor_id: str = None,
+        start_time: int = None,
+        tag_pack_id: bool = None,
+        time_field: str = None,
+        time_format: str = None,
+        time_pattern: str = None,
+        time_zone: str = None,
+    ):
+        # This parameter is required.
+        self.account_key = account_key
+        # This parameter is required.
+        self.account_name = account_name
+        # This parameter is required.
+        self.compression_codec = compression_codec
+        # This parameter is required.
+        self.container_name = container_name
+        # This parameter is required.
+        self.encoding = encoding
+        self.end_time = end_time
+        # This parameter is required.
+        self.format = format
+        # This parameter is required.
+        self.interval = interval
+        self.pattern = pattern
+        self.prefix = prefix
+        self.processor_id = processor_id
+        self.start_time = start_time
+        self.tag_pack_id = tag_pack_id
+        self.time_field = time_field
+        self.time_format = time_format
+        self.time_pattern = time_pattern
+        self.time_zone = time_zone
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.account_key is not None:
+            result['accountKey'] = self.account_key
+        if self.account_name is not None:
+            result['accountName'] = self.account_name
+        if self.compression_codec is not None:
+            result['compressionCodec'] = self.compression_codec
+        if self.container_name is not None:
+            result['containerName'] = self.container_name
+        if self.encoding is not None:
+            result['encoding'] = self.encoding
+        if self.end_time is not None:
+            result['endTime'] = self.end_time
+        if self.format is not None:
+            result['format'] = self.format
+        if self.interval is not None:
+            result['interval'] = self.interval
+        if self.pattern is not None:
+            result['pattern'] = self.pattern
+        if self.prefix is not None:
+            result['prefix'] = self.prefix
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
+        if self.start_time is not None:
+            result['startTime'] = self.start_time
+        if self.tag_pack_id is not None:
+            result['tagPackId'] = self.tag_pack_id
+        if self.time_field is not None:
+            result['timeField'] = self.time_field
+        if self.time_format is not None:
+            result['timeFormat'] = self.time_format
+        if self.time_pattern is not None:
+            result['timePattern'] = self.time_pattern
+        if self.time_zone is not None:
+            result['timeZone'] = self.time_zone
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('accountKey') is not None:
+            self.account_key = m.get('accountKey')
+        if m.get('accountName') is not None:
+            self.account_name = m.get('accountName')
+        if m.get('compressionCodec') is not None:
+            self.compression_codec = m.get('compressionCodec')
+        if m.get('containerName') is not None:
+            self.container_name = m.get('containerName')
+        if m.get('encoding') is not None:
+            self.encoding = m.get('encoding')
+        if m.get('endTime') is not None:
+            self.end_time = m.get('endTime')
+        if m.get('format') is not None:
+            self.format = m.get('format')
+        if m.get('interval') is not None:
+            self.interval = m.get('interval')
+        if m.get('pattern') is not None:
+            self.pattern = m.get('pattern')
+        if m.get('prefix') is not None:
+            self.prefix = m.get('prefix')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
+        if m.get('startTime') is not None:
+            self.start_time = m.get('startTime')
+        if m.get('tagPackId') is not None:
+            self.tag_pack_id = m.get('tagPackId')
+        if m.get('timeField') is not None:
+            self.time_field = m.get('timeField')
+        if m.get('timeFormat') is not None:
+            self.time_format = m.get('timeFormat')
+        if m.get('timePattern') is not None:
+            self.time_pattern = m.get('timePattern')
+        if m.get('timeZone') is not None:
+            self.time_zone = m.get('timeZone')
+        return self
+
+
+class AzureBlobIngestionConfiguration(TeaModel):
+    def __init__(
+        self,
+        logstore: str = None,
+        source: AzureBlobIngestionConfigurationSource = None,
+    ):
+        self.logstore = logstore
+        self.source = source
+
+    def validate(self):
+        if self.source:
+            self.source.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
+        if self.source is not None:
+            result['source'] = self.source.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('source') is not None:
+            temp_model = AzureBlobIngestionConfigurationSource()
+            self.source = temp_model.from_map(m['source'])
+        return self
+
+
+class AzureBlobIngestion(TeaModel):
+    def __init__(
+        self,
+        configuration: AzureBlobIngestionConfiguration = None,
+        create_time: int = None,
+        description: str = None,
+        display_name: str = None,
+        last_modified_time: int = None,
+        name: str = None,
+        processor_id: str = None,
+        schedule: Schedule = None,
+        schedule_id: str = None,
+        status: str = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.create_time = create_time
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.last_modified_time = last_modified_time
+        # This parameter is required.
+        self.name = name
+        self.processor_id = processor_id
+        # This parameter is required.
+        self.schedule = schedule
+        self.schedule_id = schedule_id
+        self.status = status
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.last_modified_time is not None:
+            result['lastModifiedTime'] = self.last_modified_time
+        if self.name is not None:
+            result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        if self.schedule_id is not None:
+            result['scheduleId'] = self.schedule_id
+        if self.status is not None:
+            result['status'] = self.status
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = AzureBlobIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('lastModifiedTime') is not None:
+            self.last_modified_time = m.get('lastModifiedTime')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        if m.get('scheduleId') is not None:
+            self.schedule_id = m.get('scheduleId')
+        if m.get('status') is not None:
+            self.status = m.get('status')
+        return self
+
+
 class ConsumeProcessorConfiguration(TeaModel):
     def __init__(
         self,
@@ -1249,11 +1504,11 @@ class ESIngestionConfigurationSource(TeaModel):
     def __init__(
         self,
         bootstrap_servers: str = None,
-        index: str = None,
-        min_frag_range_sec: int = None,
         connector_mode: str = None,
         end_time: int = None,
+        index: str = None,
         max_data_delay_sec: int = None,
+        min_frag_range_sec: int = None,
         password: str = None,
         query: str = None,
         start_time: int = None,
@@ -1266,14 +1521,14 @@ class ESIngestionConfigurationSource(TeaModel):
         # This parameter is required.
         self.bootstrap_servers = bootstrap_servers
         # This parameter is required.
-        self.index = index
-        # This parameter is required.
-        self.min_frag_range_sec = min_frag_range_sec
-        # This parameter is required.
         self.connector_mode = connector_mode
         self.end_time = end_time
         # This parameter is required.
+        self.index = index
+        # This parameter is required.
         self.max_data_delay_sec = max_data_delay_sec
+        # This parameter is required.
+        self.min_frag_range_sec = min_frag_range_sec
         self.password = password
         # This parameter is required.
         self.query = query
@@ -1295,64 +1550,64 @@ class ESIngestionConfigurationSource(TeaModel):
         result = dict()
         if self.bootstrap_servers is not None:
             result['BootstrapServers'] = self.bootstrap_servers
+        if self.connector_mode is not None:
+            result['ConnectorMode'] = self.connector_mode
+        if self.end_time is not None:
+            result['EndTime'] = self.end_time
         if self.index is not None:
             result['Index'] = self.index
+        if self.max_data_delay_sec is not None:
+            result['MaxDataDelaySec'] = self.max_data_delay_sec
         if self.min_frag_range_sec is not None:
             result['MinFragRangeSec'] = self.min_frag_range_sec
-        if self.connector_mode is not None:
-            result['connectorMode'] = self.connector_mode
-        if self.end_time is not None:
-            result['endTime'] = self.end_time
-        if self.max_data_delay_sec is not None:
-            result['maxDataDelaySec'] = self.max_data_delay_sec
         if self.password is not None:
-            result['
+            result['Password'] = self.password
         if self.query is not None:
-            result['
+            result['Query'] = self.query
         if self.start_time is not None:
-            result['
+            result['StartTime'] = self.start_time
         if self.time_field_name is not None:
-            result['
+            result['TimeFieldName'] = self.time_field_name
         if self.time_format is not None:
-            result['
+            result['TimeFormat'] = self.time_format
         if self.time_zone is not None:
-            result['
+            result['TimeZone'] = self.time_zone
         if self.username is not None:
-            result['
+            result['Username'] = self.username
         if self.vpc_id is not None:
-            result['
+            result['VpcId'] = self.vpc_id
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
         if m.get('BootstrapServers') is not None:
             self.bootstrap_servers = m.get('BootstrapServers')
+        if m.get('ConnectorMode') is not None:
+            self.connector_mode = m.get('ConnectorMode')
+        if m.get('EndTime') is not None:
+            self.end_time = m.get('EndTime')
         if m.get('Index') is not None:
             self.index = m.get('Index')
+        if m.get('MaxDataDelaySec') is not None:
+            self.max_data_delay_sec = m.get('MaxDataDelaySec')
         if m.get('MinFragRangeSec') is not None:
             self.min_frag_range_sec = m.get('MinFragRangeSec')
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('timeZone') is not None:
-            self.time_zone = m.get('timeZone')
-        if m.get('username') is not None:
-            self.username = m.get('username')
-        if m.get('vpcId') is not None:
-            self.vpc_id = m.get('vpcId')
+        if m.get('Password') is not None:
+            self.password = m.get('Password')
+        if m.get('Query') is not None:
+            self.query = m.get('Query')
+        if m.get('StartTime') is not None:
+            self.start_time = m.get('StartTime')
+        if m.get('TimeFieldName') is not None:
+            self.time_field_name = m.get('TimeFieldName')
+        if m.get('TimeFormat') is not None:
+            self.time_format = m.get('TimeFormat')
+        if m.get('TimeZone') is not None:
+            self.time_zone = m.get('TimeZone')
+        if m.get('Username') is not None:
+            self.username = m.get('Username')
+        if m.get('VpcId') is not None:
+            self.vpc_id = m.get('VpcId')
         return self

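The practical effect of this hunk is that ESIngestionConfigurationSource now serializes and deserializes every field with PascalCase keys instead of the earlier mixed camelCase spelling. A small illustration, assuming SDK 5.10.0; the endpoint and values are placeholders:

    from alibabacloud_sls20201230 import models as sls_models

    src = sls_models.ESIngestionConfigurationSource()
    src.from_map({
        'BootstrapServers': 'es.example.internal:9200',  # placeholder endpoint
        'Index': 'app-*',
        'TimeZone': '+0800',
    })
    print(src.to_map())   # round-trips with the same PascalCase keys ('EndTime', 'Password', ...)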
@@ -2098,6 +2353,7 @@ class KafkaIngestionConfigurationSource(TeaModel):
         from_position: str = None,
         name_resolutions: str = None,
         parse_array: bool = None,
+        processor_id: str = None,
         time_field: str = None,
         time_format: str = None,
         time_pattern: str = None,
@@ -2119,6 +2375,7 @@ class KafkaIngestionConfigurationSource(TeaModel):
         self.name_resolutions = name_resolutions
         # This parameter is required.
         self.parse_array = parse_array
+        self.processor_id = processor_id
         self.time_field = time_field
         self.time_format = time_format
         self.time_pattern = time_pattern
@@ -2156,6 +2413,8 @@ class KafkaIngestionConfigurationSource(TeaModel):
             result['nameResolutions'] = self.name_resolutions
         if self.parse_array is not None:
             result['parseArray'] = self.parse_array
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.time_field is not None:
             result['timeField'] = self.time_field
         if self.time_format is not None:
@@ -2192,6 +2451,8 @@ class KafkaIngestionConfigurationSource(TeaModel):
             self.name_resolutions = m.get('nameResolutions')
         if m.get('parseArray') is not None:
             self.parse_array = m.get('parseArray')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('timeField') is not None:
             self.time_field = m.get('timeField')
         if m.get('timeFormat') is not None:
@@ -2255,6 +2516,7 @@ class KafkaIngestion(TeaModel):
         display_name: str = None,
         last_modified_time: int = None,
         name: str = None,
+        processor_id: str = None,
         schedule: Schedule = None,
         schedule_id: str = None,
         status: str = None,
@@ -2268,6 +2530,7 @@ class KafkaIngestion(TeaModel):
         self.last_modified_time = last_modified_time
         # This parameter is required.
         self.name = name
+        self.processor_id = processor_id
         # This parameter is required.
         self.schedule = schedule
         self.schedule_id = schedule_id
@@ -2297,6 +2560,8 @@ class KafkaIngestion(TeaModel):
             result['lastModifiedTime'] = self.last_modified_time
         if self.name is not None:
             result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.schedule is not None:
             result['schedule'] = self.schedule.to_map()
         if self.schedule_id is not None:
@@ -2320,6 +2585,8 @@ class KafkaIngestion(TeaModel):
             self.last_modified_time = m.get('lastModifiedTime')
         if m.get('name') is not None:
             self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('schedule') is not None:
             temp_model = Schedule()
             self.schedule = temp_model.from_map(m['schedule'])
@@ -2370,11 +2637,13 @@ class LogItem(TeaModel):
         self,
         contents: List[LogContent] = None,
         time: int = None,
+        time_ns: int = None,
     ):
         # This parameter is required.
         self.contents = contents
         # This parameter is required.
         self.time = time
+        self.time_ns = time_ns

     def validate(self):
         if self.contents:
@@ -2394,6 +2663,8 @@ class LogItem(TeaModel):
                 result['Contents'].append(k.to_map() if k else None)
         if self.time is not None:
             result['Time'] = self.time
+        if self.time_ns is not None:
+            result['TimeNs'] = self.time_ns
         return result

     def from_map(self, m: dict = None):
@@ -2405,6 +2676,8 @@ class LogItem(TeaModel):
                 self.contents.append(temp_model.from_map(k))
         if m.get('Time') is not None:
             self.time = m.get('Time')
+        if m.get('TimeNs') is not None:
+            self.time_ns = m.get('TimeNs')
         return self

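LogItem now carries an optional time_ns field (serialized as 'TimeNs') next to the existing second-level Time. A minimal sketch, assuming LogContent keeps its key/value constructor arguments; the split of the timestamp into seconds plus a sub-second remainder is an interpretation, not something this diff states:

    import time
    from alibabacloud_sls20201230 import models as sls_models

    now_ns = time.time_ns()
    item = sls_models.LogItem(
        contents=[sls_models.LogContent(key='level', value='INFO')],
        time=now_ns // 1_000_000_000,    # seconds, as before
        time_ns=now_ns % 1_000_000_000,  # assumed sub-second part, emitted as 'TimeNs'
    )
    print(item.to_map())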
@@ -3794,6 +4067,7 @@ class OSSIngestionConfigurationSource(TeaModel):
         interval: str = None,
         pattern: str = None,
         prefix: str = None,
+        processor_id: str = None,
         restore_object_enabled: bool = None,
         role_arn: str = None,
         start_time: int = None,
@@ -3819,6 +4093,7 @@ class OSSIngestionConfigurationSource(TeaModel):
         self.interval = interval
         self.pattern = pattern
         self.prefix = prefix
+        self.processor_id = processor_id
         self.restore_object_enabled = restore_object_enabled
         self.role_arn = role_arn
         self.start_time = start_time
@@ -3857,6 +4132,8 @@ class OSSIngestionConfigurationSource(TeaModel):
             result['pattern'] = self.pattern
         if self.prefix is not None:
             result['prefix'] = self.prefix
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.restore_object_enabled is not None:
             result['restoreObjectEnabled'] = self.restore_object_enabled
         if self.role_arn is not None:
@@ -3897,6 +4174,8 @@ class OSSIngestionConfigurationSource(TeaModel):
             self.pattern = m.get('pattern')
         if m.get('prefix') is not None:
             self.prefix = m.get('prefix')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('restoreObjectEnabled') is not None:
             self.restore_object_enabled = m.get('restoreObjectEnabled')
         if m.get('roleARN') is not None:
@@ -3964,6 +4243,7 @@ class OSSIngestion(TeaModel):
         display_name: str = None,
         last_modified_time: int = None,
         name: str = None,
+        processor_id: str = None,
         schedule: Schedule = None,
         schedule_id: str = None,
         status: str = None,
@@ -3977,6 +4257,7 @@ class OSSIngestion(TeaModel):
         self.last_modified_time = last_modified_time
         # This parameter is required.
         self.name = name
+        self.processor_id = processor_id
         # This parameter is required.
         self.schedule = schedule
         self.schedule_id = schedule_id
@@ -4006,6 +4287,8 @@ class OSSIngestion(TeaModel):
             result['lastModifiedTime'] = self.last_modified_time
         if self.name is not None:
             result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.schedule is not None:
             result['schedule'] = self.schedule.to_map()
         if self.schedule_id is not None:
@@ -4029,6 +4312,8 @@ class OSSIngestion(TeaModel):
             self.last_modified_time = m.get('lastModifiedTime')
         if m.get('name') is not None:
             self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('schedule') is not None:
             temp_model = Schedule()
             self.schedule = temp_model.from_map(m['schedule'])
@@ -4142,10 +4427,12 @@ class S3IngestionConfigurationSource(TeaModel):
         compression_codec: str = None,
         encoding: str = None,
         end_time: int = None,
+        endpoint: str = None,
         format: Dict[str, Any] = None,
         interval: str = None,
         pattern: str = None,
         prefix: str = None,
+        processor_id: str = None,
         start_time: int = None,
         tag_pack_id: bool = None,
         time_field: str = None,
@@ -4169,12 +4456,14 @@ class S3IngestionConfigurationSource(TeaModel):
         # This parameter is required.
         self.encoding = encoding
         self.end_time = end_time
+        self.endpoint = endpoint
         # This parameter is required.
         self.format = format
         # This parameter is required.
         self.interval = interval
         self.pattern = pattern
         self.prefix = prefix
+        self.processor_id = processor_id
         self.start_time = start_time
         self.tag_pack_id = tag_pack_id
         self.time_field = time_field
@@ -4210,6 +4499,8 @@ class S3IngestionConfigurationSource(TeaModel):
             result['encoding'] = self.encoding
         if self.end_time is not None:
             result['endTime'] = self.end_time
+        if self.endpoint is not None:
+            result['endpoint'] = self.endpoint
         if self.format is not None:
             result['format'] = self.format
         if self.interval is not None:
@@ -4218,6 +4509,8 @@ class S3IngestionConfigurationSource(TeaModel):
             result['pattern'] = self.pattern
         if self.prefix is not None:
             result['prefix'] = self.prefix
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.start_time is not None:
             result['startTime'] = self.start_time
         if self.tag_pack_id is not None:
@@ -4254,6 +4547,8 @@ class S3IngestionConfigurationSource(TeaModel):
             self.encoding = m.get('encoding')
         if m.get('endTime') is not None:
             self.end_time = m.get('endTime')
+        if m.get('endpoint') is not None:
+            self.endpoint = m.get('endpoint')
         if m.get('format') is not None:
             self.format = m.get('format')
         if m.get('interval') is not None:
@@ -4262,6 +4557,8 @@ class S3IngestionConfigurationSource(TeaModel):
             self.pattern = m.get('pattern')
         if m.get('prefix') is not None:
             self.prefix = m.get('prefix')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('startTime') is not None:
             self.start_time = m.get('startTime')
         if m.get('tagPackId') is not None:
@@ -4288,6 +4585,7 @@ class S3Ingestion(TeaModel):
         display_name: str = None,
         last_modified_time: int = None,
         name: str = None,
+        processor_id: str = None,
         schedule: Schedule = None,
         schedule_id: str = None,
         status: str = None,
@@ -4301,6 +4599,7 @@ class S3Ingestion(TeaModel):
         self.last_modified_time = last_modified_time
         # This parameter is required.
         self.name = name
+        self.processor_id = processor_id
         # This parameter is required.
         self.schedule = schedule
         self.schedule_id = schedule_id
@@ -4330,6 +4629,8 @@ class S3Ingestion(TeaModel):
             result['lastModifiedTime'] = self.last_modified_time
         if self.name is not None:
             result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.schedule is not None:
             result['schedule'] = self.schedule.to_map()
         if self.schedule_id is not None:
@@ -4353,6 +4654,8 @@ class S3Ingestion(TeaModel):
             self.last_modified_time = m.get('lastModifiedTime')
         if m.get('name') is not None:
             self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('schedule') is not None:
             temp_model = Schedule()
             self.schedule = temp_model.from_map(m['schedule'])
@@ -4616,15 +4919,154 @@ class ScheduledSQL(TeaModel):
         # This parameter is required.
         self.name = name
         # This parameter is required.
-        self.schedule = schedule
-        self.schedule_id = schedule_id
-        self.status = status
+        self.schedule = schedule
+        self.schedule_id = schedule_id
+        self.status = status
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.last_modified_time is not None:
+            result['lastModifiedTime'] = self.last_modified_time
+        if self.name is not None:
+            result['name'] = self.name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        if self.schedule_id is not None:
+            result['scheduleId'] = self.schedule_id
+        if self.status is not None:
+            result['status'] = self.status
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = ScheduledSQLConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('lastModifiedTime') is not None:
+            self.last_modified_time = m.get('lastModifiedTime')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        if m.get('scheduleId') is not None:
+            self.schedule_id = m.get('scheduleId')
+        if m.get('status') is not None:
+            self.status = m.get('status')
+        return self
+
+
+class ShardingPolicyShardGroup(TeaModel):
+    def __init__(
+        self,
+        group_count: int = None,
+        keys: List[str] = None,
+    ):
+        # This parameter is required.
+        self.group_count = group_count
+        # This parameter is required.
+        self.keys = keys
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.group_count is not None:
+            result['groupCount'] = self.group_count
+        if self.keys is not None:
+            result['keys'] = self.keys
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('groupCount') is not None:
+            self.group_count = m.get('groupCount')
+        if m.get('keys') is not None:
+            self.keys = m.get('keys')
+        return self
+
+
+class ShardingPolicyShardHash(TeaModel):
+    def __init__(
+        self,
+        keys: List[str] = None,
+        max_hash_count: int = None,
+    ):
+        # This parameter is required.
+        self.keys = keys
+        # This parameter is required.
+        self.max_hash_count = max_hash_count
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.keys is not None:
+            result['keys'] = self.keys
+        if self.max_hash_count is not None:
+            result['maxHashCount'] = self.max_hash_count
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('keys') is not None:
+            self.keys = m.get('keys')
+        if m.get('maxHashCount') is not None:
+            self.max_hash_count = m.get('maxHashCount')
+        return self
+
+
+class ShardingPolicy(TeaModel):
+    def __init__(
+        self,
+        query_active_time: int = None,
+        shard_group: ShardingPolicyShardGroup = None,
+        shard_hash: ShardingPolicyShardHash = None,
+    ):
+        self.query_active_time = query_active_time
+        self.shard_group = shard_group
+        # This parameter is required.
+        self.shard_hash = shard_hash

     def validate(self):
-        if self.
-            self.
-        if self.
-            self.
+        if self.shard_group:
+            self.shard_group.validate()
+        if self.shard_hash:
+            self.shard_hash.validate()

     def to_map(self):
         _map = super().to_map()
@@ -4632,48 +5074,24 @@ class ScheduledSQL(TeaModel):
             return _map

         result = dict()
-        if self.
-            result['
-        if self.
-            result['
-        if self.
-            result['
-        if self.display_name is not None:
-            result['displayName'] = self.display_name
-        if self.last_modified_time is not None:
-            result['lastModifiedTime'] = self.last_modified_time
-        if self.name is not None:
-            result['name'] = self.name
-        if self.schedule is not None:
-            result['schedule'] = self.schedule.to_map()
-        if self.schedule_id is not None:
-            result['scheduleId'] = self.schedule_id
-        if self.status is not None:
-            result['status'] = self.status
+        if self.query_active_time is not None:
+            result['queryActiveTime'] = self.query_active_time
+        if self.shard_group is not None:
+            result['shardGroup'] = self.shard_group.to_map()
+        if self.shard_hash is not None:
+            result['shardHash'] = self.shard_hash.to_map()
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-
-
-
-            self.
-        if m.get('
-
-
-            self.display_name = m.get('displayName')
-        if m.get('lastModifiedTime') is not None:
-            self.last_modified_time = m.get('lastModifiedTime')
-        if m.get('name') is not None:
-            self.name = m.get('name')
-        if m.get('schedule') is not None:
-            temp_model = Schedule()
-            self.schedule = temp_model.from_map(m['schedule'])
-        if m.get('scheduleId') is not None:
-            self.schedule_id = m.get('scheduleId')
-        if m.get('status') is not None:
-            self.status = m.get('status')
+        if m.get('queryActiveTime') is not None:
+            self.query_active_time = m.get('queryActiveTime')
+        if m.get('shardGroup') is not None:
+            temp_model = ShardingPolicyShardGroup()
+            self.shard_group = temp_model.from_map(m['shardGroup'])
+        if m.get('shardHash') is not None:
+            temp_model = ShardingPolicyShardHash()
+            self.shard_hash = temp_model.from_map(m['shardHash'])
         return self

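A sketch of the three new sharding-policy models introduced above, assuming SDK 5.10.0; the key names and counts passed in are placeholders chosen for illustration:

    from alibabacloud_sls20201230 import models as sls_models

    policy = sls_models.ShardingPolicy(
        shard_hash=sls_models.ShardingPolicyShardHash(keys=['request_id'], max_hash_count=8),
        shard_group=sls_models.ShardingPolicyShardGroup(keys=['app'], group_count=4),
        query_active_time=3600,
    )
    policy.validate()        # walks into the nested shard group / shard hash models
    print(policy.to_map())   # {'queryActiveTime': 3600, 'shardGroup': {...}, 'shardHash': {...}}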
@@ -5032,6 +5450,7 @@ class Index(TeaModel):
         log_reduce_black_list: List[str] = None,
         log_reduce_white_list: List[str] = None,
         max_text_len: int = None,
+        scan_index: bool = None,
     ):
         self.keys = keys
         self.line = line
@@ -5039,6 +5458,7 @@ class Index(TeaModel):
         self.log_reduce_black_list = log_reduce_black_list
         self.log_reduce_white_list = log_reduce_white_list
         self.max_text_len = max_text_len
+        self.scan_index = scan_index

     def validate(self):
         if self.keys:
@@ -5068,6 +5488,8 @@ class Index(TeaModel):
             result['log_reduce_white_list'] = self.log_reduce_white_list
         if self.max_text_len is not None:
             result['max_text_len'] = self.max_text_len
+        if self.scan_index is not None:
+            result['scan_index'] = self.scan_index
         return result

     def from_map(self, m: dict = None):
@@ -5088,6 +5510,8 @@ class Index(TeaModel):
             self.log_reduce_white_list = m.get('log_reduce_white_list')
         if m.get('max_text_len') is not None:
             self.max_text_len = m.get('max_text_len')
+        if m.get('scan_index') is not None:
+            self.scan_index = m.get('scan_index')
         return self

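Index gains a scan_index flag, serialized with the snake_case key this model already uses for its other fields. A minimal sketch assuming SDK 5.10.0:

    from alibabacloud_sls20201230 import models as sls_models

    index = sls_models.Index(scan_index=True)
    assert index.to_map()['scan_index'] is True   # only non-None fields are emitted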
@@ -5186,6 +5610,7 @@ class Logstore(TeaModel):
         processor_id: str = None,
         product_type: str = None,
         shard_count: int = None,
+        sharding_policy: ShardingPolicy = None,
         telemetry_type: str = None,
         ttl: int = None,
     ):
@@ -5205,6 +5630,7 @@ class Logstore(TeaModel):
         self.product_type = product_type
         # This parameter is required.
         self.shard_count = shard_count
+        self.sharding_policy = sharding_policy
         self.telemetry_type = telemetry_type
         # This parameter is required.
         self.ttl = ttl
@@ -5212,6 +5638,8 @@ class Logstore(TeaModel):
     def validate(self):
         if self.encrypt_conf:
             self.encrypt_conf.validate()
+        if self.sharding_policy:
+            self.sharding_policy.validate()

     def to_map(self):
         _map = super().to_map()
@@ -5247,6 +5675,8 @@ class Logstore(TeaModel):
             result['productType'] = self.product_type
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.telemetry_type is not None:
             result['telemetryType'] = self.telemetry_type
         if self.ttl is not None:
@@ -5284,6 +5714,9 @@ class Logstore(TeaModel):
             self.product_type = m.get('productType')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('telemetryType') is not None:
             self.telemetry_type = m.get('telemetryType')
         if m.get('ttl') is not None:
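Logstore now nests a ShardingPolicy, and from_map() rebuilds it from the 'shardingPolicy' key. A small sketch assuming SDK 5.10.0; only keys that appear in this diff are used, and the values are placeholders:

    from alibabacloud_sls20201230 import models as sls_models

    ls = sls_models.Logstore().from_map({
        'shardCount': 2,
        'shardingPolicy': {'queryActiveTime': 3600, 'shardHash': {'keys': ['uid'], 'maxHashCount': 8}},
    })
    print(type(ls.sharding_policy).__name__)   # ShardingPolicy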
@@ -5438,7 +5871,6 @@ class Project(TeaModel):
         data_redundancy_type: str = None,
         description: str = None,
         last_modify_time: str = None,
-        location: str = None,
         owner: str = None,
         project_name: str = None,
         quota: Dict[str, Any] = None,
@@ -5452,7 +5884,6 @@ class Project(TeaModel):
         # This parameter is required.
         self.description = description
         self.last_modify_time = last_modify_time
-        self.location = location
         self.owner = owner
         # This parameter is required.
         self.project_name = project_name
@@ -5479,8 +5910,6 @@ class Project(TeaModel):
             result['description'] = self.description
         if self.last_modify_time is not None:
             result['lastModifyTime'] = self.last_modify_time
-        if self.location is not None:
-            result['location'] = self.location
         if self.owner is not None:
             result['owner'] = self.owner
         if self.project_name is not None:
@@ -5507,8 +5936,6 @@ class Project(TeaModel):
             self.description = m.get('description')
         if m.get('lastModifyTime') is not None:
             self.last_modify_time = m.get('lastModifyTime')
-        if m.get('location') is not None:
-            self.location = m.get('location')
         if m.get('owner') is not None:
             self.owner = m.get('owner')
         if m.get('projectName') is not None:
@@ -6293,6 +6720,98 @@ class CreateAnnotationLabelResponse(TeaModel):
         return self


+class CreateAzureBlobIngestionRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: AzureBlobIngestionConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        name: str = None,
+        schedule: Schedule = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        # This parameter is required.
+        self.name = name
+        self.schedule = schedule
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.name is not None:
+            result['name'] = self.name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = AzureBlobIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        return self
+
+
+class CreateAzureBlobIngestionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class CreateConfigRequest(TeaModel):
     def __init__(
         self,
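The new request/response pair mirrors the AzureBlobIngestion job model shown earlier. A sketch of building the request body only; the client method that sends it lives in client.py, which is not shown in this section, so no API call is illustrated:

    from alibabacloud_sls20201230 import models as sls_models

    req = sls_models.CreateAzureBlobIngestionRequest(
        name='azure-blob-ingestion-1',                   # placeholder job name
        display_name='Azure Blob ingestion',
        configuration=sls_models.AzureBlobIngestionConfiguration(logstore='target-logstore'),
    )
    req.validate()
    print(req.to_map())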
@@ -6901,6 +7420,98 @@ class CreateETLResponse(TeaModel):
         return self


+class CreateElasticsearchIngestionRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: ESIngestionConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        name: str = None,
+        schedule: Schedule = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        # This parameter is required.
+        self.name = name
+        self.schedule = schedule
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.name is not None:
+            result['name'] = self.name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = ESIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        return self
+
+
+class CreateElasticsearchIngestionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class CreateIndexRequest(TeaModel):
     def __init__(
         self,
@@ -6978,6 +7589,7 @@ class CreateLogStoreRequest(TeaModel):
         mode: str = None,
         processor_id: str = None,
         shard_count: int = None,
+        sharding_policy: ShardingPolicy = None,
         telemetry_type: str = None,
         ttl: int = None,
     ):
@@ -7030,6 +7642,7 @@ class CreateLogStoreRequest(TeaModel):
         #
         # This parameter is required.
         self.shard_count = shard_count
+        self.sharding_policy = sharding_policy
         # The type of the observable data. Valid values:
         #
         # * **None** (default): log data
@@ -7043,6 +7656,8 @@ class CreateLogStoreRequest(TeaModel):
     def validate(self):
         if self.encrypt_conf:
             self.encrypt_conf.validate()
+        if self.sharding_policy:
+            self.sharding_policy.validate()

     def to_map(self):
         _map = super().to_map()
@@ -7072,6 +7687,8 @@ class CreateLogStoreRequest(TeaModel):
             result['processorId'] = self.processor_id
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.telemetry_type is not None:
             result['telemetryType'] = self.telemetry_type
         if self.ttl is not None:
@@ -7103,6 +7720,9 @@ class CreateLogStoreRequest(TeaModel):
             self.processor_id = m.get('processorId')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('telemetryType') is not None:
             self.telemetry_type = m.get('telemetryType')
         if m.get('ttl') is not None:
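CreateLogStoreRequest accepts the same ShardingPolicy as the Logstore model. A sketch of building the request body, assuming SDK 5.10.0; the keys and counts are placeholders, and the surrounding client call is not shown in this diff:

    from alibabacloud_sls20201230 import models as sls_models

    req = sls_models.CreateLogStoreRequest(
        shard_count=2,
        ttl=30,
        sharding_policy=sls_models.ShardingPolicy(
            shard_hash=sls_models.ShardingPolicyShardHash(keys=['trace_id'], max_hash_count=16),
        ),
    )
    req.validate()                        # now also validates the nested sharding policy
    print(req.to_map()['shardingPolicy'])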
@@ -7649,6 +8269,7 @@ class CreateMaxComputeExportResponse(TeaModel):
 class CreateMetricStoreRequest(TeaModel):
     def __init__(
         self,
+        append_meta: bool = None,
         auto_split: bool = None,
         hot_ttl: int = None,
         infrequent_access_ttl: int = None,
@@ -7657,8 +8278,10 @@ class CreateMetricStoreRequest(TeaModel):
         mode: str = None,
         name: str = None,
         shard_count: int = None,
+        sharding_policy: ShardingPolicy = None,
         ttl: int = None,
     ):
+        self.append_meta = append_meta
         # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
         self.hot_ttl = hot_ttl
@@ -7677,13 +8300,15 @@ class CreateMetricStoreRequest(TeaModel):
         #
         # This parameter is required.
         self.shard_count = shard_count
+        self.sharding_policy = sharding_policy
         # The retention period of the metric data in the Metricstore. Unit: days.
         #
         # This parameter is required.
         self.ttl = ttl

     def validate(self):
-
+        if self.sharding_policy:
+            self.sharding_policy.validate()

     def to_map(self):
         _map = super().to_map()
@@ -7691,6 +8316,8 @@ class CreateMetricStoreRequest(TeaModel):
             return _map

         result = dict()
+        if self.append_meta is not None:
+            result['appendMeta'] = self.append_meta
         if self.auto_split is not None:
             result['autoSplit'] = self.auto_split
         if self.hot_ttl is not None:
@@ -7707,12 +8334,16 @@ class CreateMetricStoreRequest(TeaModel):
             result['name'] = self.name
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.ttl is not None:
             result['ttl'] = self.ttl
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('appendMeta') is not None:
+            self.append_meta = m.get('appendMeta')
         if m.get('autoSplit') is not None:
             self.auto_split = m.get('autoSplit')
         if m.get('hot_ttl') is not None:
@@ -7729,6 +8360,9 @@ class CreateMetricStoreRequest(TeaModel):
             self.name = m.get('name')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('ttl') is not None:
             self.ttl = m.get('ttl')
         return self
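CreateMetricStoreRequest picks up append_meta (serialized as 'appendMeta') and the same nested sharding_policy alongside the existing shard and TTL settings. A short sketch assuming SDK 5.10.0; values are placeholders:

    from alibabacloud_sls20201230 import models as sls_models

    req = sls_models.CreateMetricStoreRequest(name='app-metrics', shard_count=2, ttl=30, append_meta=True)
    body = req.to_map()
    print(body['appendMeta'], body['shardCount'], body['ttl'])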
@@ -8738,9 +9372,38 @@ class DeleteAgentInstanceConfigRequest(TeaModel):
 class DeleteAgentInstanceConfigShrinkRequest(TeaModel):
     def __init__(
         self,
-        attributes_shrink: str = None,
+        attributes_shrink: str = None,
+    ):
+        self.attributes_shrink = attributes_shrink
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.attributes_shrink is not None:
+            result['attributes'] = self.attributes_shrink
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('attributes') is not None:
+            self.attributes_shrink = m.get('attributes')
+        return self
+
+
+class DeleteAgentInstanceConfigResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
     ):
-        self.
+        self.headers = headers
+        self.status_code = status_code

     def validate(self):
         pass
@@ -8751,18 +9414,22 @@ class DeleteAgentInstanceConfigShrinkRequest(TeaModel):
             return _map

         result = dict()
-        if self.
-            result['
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-            self.
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
         return self


-class
+class DeleteAlertResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8795,7 +9462,7 @@ class DeleteAgentInstanceConfigResponse(TeaModel):
         return self


-class
+class DeleteAnnotationDataResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8828,7 +9495,7 @@ class DeleteAlertResponse(TeaModel):
         return self


-class
+class DeleteAnnotationDataSetResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8861,7 +9528,7 @@ class DeleteAnnotationDataResponse(TeaModel):
         return self


-class
+class DeleteAnnotationLabelResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8894,7 +9561,7 @@ class DeleteAnnotationDataSetResponse(TeaModel):
         return self


-class
+class DeleteAzureBlobIngestionResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -9224,6 +9891,39 @@ class DeleteETLResponse(TeaModel):
         return self
|
|
9225
9892
|
|
|
9226
9893
|
|
|
9894
|
+
class DeleteElasticsearchIngestionResponse(TeaModel):
|
|
9895
|
+
def __init__(
|
|
9896
|
+
self,
|
|
9897
|
+
headers: Dict[str, str] = None,
|
|
9898
|
+
status_code: int = None,
|
|
9899
|
+
):
|
|
9900
|
+
self.headers = headers
|
|
9901
|
+
self.status_code = status_code
|
|
9902
|
+
|
|
9903
|
+
def validate(self):
|
|
9904
|
+
pass
|
|
9905
|
+
|
|
9906
|
+
def to_map(self):
|
|
9907
|
+
_map = super().to_map()
|
|
9908
|
+
if _map is not None:
|
|
9909
|
+
return _map
|
|
9910
|
+
|
|
9911
|
+
result = dict()
|
|
9912
|
+
if self.headers is not None:
|
|
9913
|
+
result['headers'] = self.headers
|
|
9914
|
+
if self.status_code is not None:
|
|
9915
|
+
result['statusCode'] = self.status_code
|
|
9916
|
+
return result
|
|
9917
|
+
|
|
9918
|
+
def from_map(self, m: dict = None):
|
|
9919
|
+
m = m or dict()
|
|
9920
|
+
if m.get('headers') is not None:
|
|
9921
|
+
self.headers = m.get('headers')
|
|
9922
|
+
if m.get('statusCode') is not None:
|
|
9923
|
+
self.status_code = m.get('statusCode')
|
|
9924
|
+
return self
|
|
9925
|
+
|
|
9926
|
+
|
|
9227
9927
|
class DeleteIndexResponse(TeaModel):
|
|
9228
9928
|
def __init__(
|
|
9229
9929
|
self,
|
|
@@ -9850,12 +10550,14 @@ class DescribeRegionsResponseBodyRegions(TeaModel):
     def __init__(
         self,
         data_redundancy_type: List[str] = None,
+        internal_endpoint: str = None,
         internet_endpoint: str = None,
         intranet_endpoint: str = None,
         local_name: str = None,
         region: str = None,
     ):
         self.data_redundancy_type = data_redundancy_type
+        self.internal_endpoint = internal_endpoint
         # The public endpoint of Simple Log Service.
         self.internet_endpoint = internet_endpoint
         # The internal endpoint of Simple Log Service.
@@ -9876,6 +10578,8 @@ class DescribeRegionsResponseBodyRegions(TeaModel):
         result = dict()
         if self.data_redundancy_type is not None:
             result['dataRedundancyType'] = self.data_redundancy_type
+        if self.internal_endpoint is not None:
+            result['internalEndpoint'] = self.internal_endpoint
         if self.internet_endpoint is not None:
             result['internetEndpoint'] = self.internet_endpoint
         if self.intranet_endpoint is not None:
@@ -9890,6 +10594,8 @@ class DescribeRegionsResponseBodyRegions(TeaModel):
         m = m or dict()
         if m.get('dataRedundancyType') is not None:
             self.data_redundancy_type = m.get('dataRedundancyType')
+        if m.get('internalEndpoint') is not None:
+            self.internal_endpoint = m.get('internalEndpoint')
         if m.get('internetEndpoint') is not None:
             self.internet_endpoint = m.get('internetEndpoint')
         if m.get('intranetEndpoint') is not None:
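The regions model now exposes an `internalEndpoint` next to the existing internet/intranet endpoints. A minimal sketch of deserializing one region entry; the sample payload below is invented for illustration.

```python
from alibabacloud_sls20201230 import models as sls_models

# Invented sample entry, shaped like one element of the DescribeRegions payload.
raw_region = {
    'region': 'cn-hangzhou',
    'internetEndpoint': 'cn-hangzhou.log.aliyuncs.com',
    'intranetEndpoint': 'cn-hangzhou-intranet.log.aliyuncs.com',
    'internalEndpoint': 'cn-hangzhou-internal.log.aliyuncs.com',  # new in 5.10.0
}

region = sls_models.DescribeRegionsResponseBodyRegions().from_map(raw_region)
print(region.internal_endpoint)  # -> cn-hangzhou-internal.log.aliyuncs.com
```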
@@ -10585,6 +11291,47 @@ class GetAppliedMachineGroupsResponse(TeaModel):
|
|
|
10585
11291
|
return self
|
|
10586
11292
|
|
|
10587
11293
|
|
|
11294
|
+
class GetAzureBlobIngestionResponse(TeaModel):
|
|
11295
|
+
def __init__(
|
|
11296
|
+
self,
|
|
11297
|
+
headers: Dict[str, str] = None,
|
|
11298
|
+
status_code: int = None,
|
|
11299
|
+
body: AzureBlobIngestion = None,
|
|
11300
|
+
):
|
|
11301
|
+
self.headers = headers
|
|
11302
|
+
self.status_code = status_code
|
|
11303
|
+
self.body = body
|
|
11304
|
+
|
|
11305
|
+
def validate(self):
|
|
11306
|
+
if self.body:
|
|
11307
|
+
self.body.validate()
|
|
11308
|
+
|
|
11309
|
+
def to_map(self):
|
|
11310
|
+
_map = super().to_map()
|
|
11311
|
+
if _map is not None:
|
|
11312
|
+
return _map
|
|
11313
|
+
|
|
11314
|
+
result = dict()
|
|
11315
|
+
if self.headers is not None:
|
|
11316
|
+
result['headers'] = self.headers
|
|
11317
|
+
if self.status_code is not None:
|
|
11318
|
+
result['statusCode'] = self.status_code
|
|
11319
|
+
if self.body is not None:
|
|
11320
|
+
result['body'] = self.body.to_map()
|
|
11321
|
+
return result
|
|
11322
|
+
|
|
11323
|
+
def from_map(self, m: dict = None):
|
|
11324
|
+
m = m or dict()
|
|
11325
|
+
if m.get('headers') is not None:
|
|
11326
|
+
self.headers = m.get('headers')
|
|
11327
|
+
if m.get('statusCode') is not None:
|
|
11328
|
+
self.status_code = m.get('statusCode')
|
|
11329
|
+
if m.get('body') is not None:
|
|
11330
|
+
temp_model = AzureBlobIngestion()
|
|
11331
|
+
self.body = temp_model.from_map(m['body'])
|
|
11332
|
+
return self
|
|
11333
|
+
|
|
11334
|
+
|
|
10588
11335
|
class GetCheckPointRequest(TeaModel):
|
|
10589
11336
|
def __init__(
|
|
10590
11337
|
self,
|
|
@@ -11911,6 +12658,47 @@ class GetETLResponse(TeaModel):
|
|
|
11911
12658
|
return self
|
|
11912
12659
|
|
|
11913
12660
|
|
|
12661
|
+
class GetElasticsearchIngestionResponse(TeaModel):
|
|
12662
|
+
def __init__(
|
|
12663
|
+
self,
|
|
12664
|
+
headers: Dict[str, str] = None,
|
|
12665
|
+
status_code: int = None,
|
|
12666
|
+
body: ESIngestion = None,
|
|
12667
|
+
):
|
|
12668
|
+
self.headers = headers
|
|
12669
|
+
self.status_code = status_code
|
|
12670
|
+
self.body = body
|
|
12671
|
+
|
|
12672
|
+
def validate(self):
|
|
12673
|
+
if self.body:
|
|
12674
|
+
self.body.validate()
|
|
12675
|
+
|
|
12676
|
+
def to_map(self):
|
|
12677
|
+
_map = super().to_map()
|
|
12678
|
+
if _map is not None:
|
|
12679
|
+
return _map
|
|
12680
|
+
|
|
12681
|
+
result = dict()
|
|
12682
|
+
if self.headers is not None:
|
|
12683
|
+
result['headers'] = self.headers
|
|
12684
|
+
if self.status_code is not None:
|
|
12685
|
+
result['statusCode'] = self.status_code
|
|
12686
|
+
if self.body is not None:
|
|
12687
|
+
result['body'] = self.body.to_map()
|
|
12688
|
+
return result
|
|
12689
|
+
|
|
12690
|
+
def from_map(self, m: dict = None):
|
|
12691
|
+
m = m or dict()
|
|
12692
|
+
if m.get('headers') is not None:
|
|
12693
|
+
self.headers = m.get('headers')
|
|
12694
|
+
if m.get('statusCode') is not None:
|
|
12695
|
+
self.status_code = m.get('statusCode')
|
|
12696
|
+
if m.get('body') is not None:
|
|
12697
|
+
temp_model = ESIngestion()
|
|
12698
|
+
self.body = temp_model.from_map(m['body'])
|
|
12699
|
+
return self
|
|
12700
|
+
|
|
12701
|
+
|
|
11914
12702
|
class GetHistogramsRequest(TeaModel):
|
|
11915
12703
|
def __init__(
|
|
11916
12704
|
self,
|
|
@@ -13314,6 +14102,7 @@ class GetMaxComputeExportResponse(TeaModel):
 class GetMetricStoreResponseBody(TeaModel):
     def __init__(
         self,
+        append_meta: bool = None,
         auto_split: bool = None,
         create_time: int = None,
         hot_ttl: int = None,
@@ -13324,8 +14113,10 @@ class GetMetricStoreResponseBody(TeaModel):
         mode: str = None,
         name: str = None,
         shard_count: int = None,
+        sharding_policy: ShardingPolicy = None,
         ttl: int = None,
     ):
+        self.append_meta = append_meta
         # Indicates whether the automatic sharding feature is enabled.
         self.auto_split = auto_split
         # The creation time. The value is a UNIX timestamp.
@@ -13344,11 +14135,13 @@ class GetMetricStoreResponseBody(TeaModel):
         self.name = name
         # The number of shards.
         self.shard_count = shard_count
+        self.sharding_policy = sharding_policy
         # The retention period. Unit: days.
         self.ttl = ttl
 
     def validate(self):
-        pass
+        if self.sharding_policy:
+            self.sharding_policy.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -13356,6 +14149,8 @@ class GetMetricStoreResponseBody(TeaModel):
             return _map
 
         result = dict()
+        if self.append_meta is not None:
+            result['appendMeta'] = self.append_meta
         if self.auto_split is not None:
             result['autoSplit'] = self.auto_split
         if self.create_time is not None:
@@ -13376,12 +14171,16 @@ class GetMetricStoreResponseBody(TeaModel):
             result['name'] = self.name
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.ttl is not None:
             result['ttl'] = self.ttl
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('appendMeta') is not None:
+            self.append_meta = m.get('appendMeta')
         if m.get('autoSplit') is not None:
             self.auto_split = m.get('autoSplit')
         if m.get('createTime') is not None:
@@ -13402,6 +14201,9 @@ class GetMetricStoreResponseBody(TeaModel):
             self.name = m.get('name')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('ttl') is not None:
             self.ttl = m.get('ttl')
         return self
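The same pair of fields appears on the read side: a Metricstore fetched through the API now carries `appendMeta` and a nested `shardingPolicy` that is rehydrated into a ShardingPolicy model. A minimal sketch with an invented response payload; the keys inside 'shardingPolicy' are not defined in this excerpt, so it is left empty.

```python
from alibabacloud_sls20201230 import models as sls_models

# Invented payload in the shape produced by GetMetricStore.
payload = {
    'name': 'prometheus-metrics',
    'shardCount': 2,
    'ttl': 30,
    'appendMeta': True,    # new in 5.10.0
    'shardingPolicy': {},  # new in 5.10.0; nested keys not shown in this diff
}

body = sls_models.GetMetricStoreResponseBody().from_map(payload)
print(body.append_meta)                     # True
print(type(body.sharding_policy).__name__)  # ShardingPolicy
```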
@@ -14675,24 +15477,143 @@ class ListAlertsResponseBody(TeaModel):
|
|
|
14675
15477
|
|
|
14676
15478
|
def from_map(self, m: dict = None):
|
|
14677
15479
|
m = m or dict()
|
|
14678
|
-
if m.get('count') is not None:
|
|
14679
|
-
self.count = m.get('count')
|
|
14680
|
-
self.results = []
|
|
14681
|
-
if m.get('results') is not None:
|
|
14682
|
-
for k in m.get('results'):
|
|
14683
|
-
temp_model = Alert()
|
|
14684
|
-
self.results.append(temp_model.from_map(k))
|
|
15480
|
+
if m.get('count') is not None:
|
|
15481
|
+
self.count = m.get('count')
|
|
15482
|
+
self.results = []
|
|
15483
|
+
if m.get('results') is not None:
|
|
15484
|
+
for k in m.get('results'):
|
|
15485
|
+
temp_model = Alert()
|
|
15486
|
+
self.results.append(temp_model.from_map(k))
|
|
15487
|
+
if m.get('total') is not None:
|
|
15488
|
+
self.total = m.get('total')
|
|
15489
|
+
return self
|
|
15490
|
+
|
|
15491
|
+
|
|
15492
|
+
class ListAlertsResponse(TeaModel):
|
|
15493
|
+
def __init__(
|
|
15494
|
+
self,
|
|
15495
|
+
headers: Dict[str, str] = None,
|
|
15496
|
+
status_code: int = None,
|
|
15497
|
+
body: ListAlertsResponseBody = None,
|
|
15498
|
+
):
|
|
15499
|
+
self.headers = headers
|
|
15500
|
+
self.status_code = status_code
|
|
15501
|
+
self.body = body
|
|
15502
|
+
|
|
15503
|
+
def validate(self):
|
|
15504
|
+
if self.body:
|
|
15505
|
+
self.body.validate()
|
|
15506
|
+
|
|
15507
|
+
def to_map(self):
|
|
15508
|
+
_map = super().to_map()
|
|
15509
|
+
if _map is not None:
|
|
15510
|
+
return _map
|
|
15511
|
+
|
|
15512
|
+
result = dict()
|
|
15513
|
+
if self.headers is not None:
|
|
15514
|
+
result['headers'] = self.headers
|
|
15515
|
+
if self.status_code is not None:
|
|
15516
|
+
result['statusCode'] = self.status_code
|
|
15517
|
+
if self.body is not None:
|
|
15518
|
+
result['body'] = self.body.to_map()
|
|
15519
|
+
return result
|
|
15520
|
+
|
|
15521
|
+
def from_map(self, m: dict = None):
|
|
15522
|
+
m = m or dict()
|
|
15523
|
+
if m.get('headers') is not None:
|
|
15524
|
+
self.headers = m.get('headers')
|
|
15525
|
+
if m.get('statusCode') is not None:
|
|
15526
|
+
self.status_code = m.get('statusCode')
|
|
15527
|
+
if m.get('body') is not None:
|
|
15528
|
+
temp_model = ListAlertsResponseBody()
|
|
15529
|
+
self.body = temp_model.from_map(m['body'])
|
|
15530
|
+
return self
|
|
15531
|
+
|
|
15532
|
+
|
|
15533
|
+
class ListAnnotationDataRequest(TeaModel):
|
|
15534
|
+
def __init__(
|
|
15535
|
+
self,
|
|
15536
|
+
offset: int = None,
|
|
15537
|
+
size: int = None,
|
|
15538
|
+
):
|
|
15539
|
+
# The line from which the query starts.
|
|
15540
|
+
self.offset = offset
|
|
15541
|
+
# The number of entries per page.
|
|
15542
|
+
self.size = size
|
|
15543
|
+
|
|
15544
|
+
def validate(self):
|
|
15545
|
+
pass
|
|
15546
|
+
|
|
15547
|
+
def to_map(self):
|
|
15548
|
+
_map = super().to_map()
|
|
15549
|
+
if _map is not None:
|
|
15550
|
+
return _map
|
|
15551
|
+
|
|
15552
|
+
result = dict()
|
|
15553
|
+
if self.offset is not None:
|
|
15554
|
+
result['offset'] = self.offset
|
|
15555
|
+
if self.size is not None:
|
|
15556
|
+
result['size'] = self.size
|
|
15557
|
+
return result
|
|
15558
|
+
|
|
15559
|
+
def from_map(self, m: dict = None):
|
|
15560
|
+
m = m or dict()
|
|
15561
|
+
if m.get('offset') is not None:
|
|
15562
|
+
self.offset = m.get('offset')
|
|
15563
|
+
if m.get('size') is not None:
|
|
15564
|
+
self.size = m.get('size')
|
|
15565
|
+
return self
|
|
15566
|
+
|
|
15567
|
+
|
|
15568
|
+
class ListAnnotationDataResponseBody(TeaModel):
|
|
15569
|
+
def __init__(
|
|
15570
|
+
self,
|
|
15571
|
+
data: List[MLDataParam] = None,
|
|
15572
|
+
total: int = None,
|
|
15573
|
+
):
|
|
15574
|
+
# The data returned.
|
|
15575
|
+
self.data = data
|
|
15576
|
+
# The total number of entries returned.
|
|
15577
|
+
self.total = total
|
|
15578
|
+
|
|
15579
|
+
def validate(self):
|
|
15580
|
+
if self.data:
|
|
15581
|
+
for k in self.data:
|
|
15582
|
+
if k:
|
|
15583
|
+
k.validate()
|
|
15584
|
+
|
|
15585
|
+
def to_map(self):
|
|
15586
|
+
_map = super().to_map()
|
|
15587
|
+
if _map is not None:
|
|
15588
|
+
return _map
|
|
15589
|
+
|
|
15590
|
+
result = dict()
|
|
15591
|
+
result['data'] = []
|
|
15592
|
+
if self.data is not None:
|
|
15593
|
+
for k in self.data:
|
|
15594
|
+
result['data'].append(k.to_map() if k else None)
|
|
15595
|
+
if self.total is not None:
|
|
15596
|
+
result['total'] = self.total
|
|
15597
|
+
return result
|
|
15598
|
+
|
|
15599
|
+
def from_map(self, m: dict = None):
|
|
15600
|
+
m = m or dict()
|
|
15601
|
+
self.data = []
|
|
15602
|
+
if m.get('data') is not None:
|
|
15603
|
+
for k in m.get('data'):
|
|
15604
|
+
temp_model = MLDataParam()
|
|
15605
|
+
self.data.append(temp_model.from_map(k))
|
|
14685
15606
|
if m.get('total') is not None:
|
|
14686
15607
|
self.total = m.get('total')
|
|
14687
15608
|
return self
|
|
14688
15609
|
|
|
14689
15610
|
|
|
14690
|
-
class
|
|
15611
|
+
class ListAnnotationDataResponse(TeaModel):
|
|
14691
15612
|
def __init__(
|
|
14692
15613
|
self,
|
|
14693
15614
|
headers: Dict[str, str] = None,
|
|
14694
15615
|
status_code: int = None,
|
|
14695
|
-
body:
|
|
15616
|
+
body: ListAnnotationDataResponseBody = None,
|
|
14696
15617
|
):
|
|
14697
15618
|
self.headers = headers
|
|
14698
15619
|
self.status_code = status_code
|
|
@@ -14723,12 +15644,12 @@ class ListAlertsResponse(TeaModel):
|
|
|
14723
15644
|
if m.get('statusCode') is not None:
|
|
14724
15645
|
self.status_code = m.get('statusCode')
|
|
14725
15646
|
if m.get('body') is not None:
|
|
14726
|
-
temp_model =
|
|
15647
|
+
temp_model = ListAnnotationDataResponseBody()
|
|
14727
15648
|
self.body = temp_model.from_map(m['body'])
|
|
14728
15649
|
return self
|
|
14729
15650
|
|
|
14730
15651
|
|
|
14731
|
-
class
|
|
15652
|
+
class ListAnnotationDataSetsRequest(TeaModel):
|
|
14732
15653
|
def __init__(
|
|
14733
15654
|
self,
|
|
14734
15655
|
offset: int = None,
|
|
@@ -14763,10 +15684,10 @@ class ListAnnotationDataRequest(TeaModel):
|
|
|
14763
15684
|
return self
|
|
14764
15685
|
|
|
14765
15686
|
|
|
14766
|
-
class
|
|
15687
|
+
class ListAnnotationDataSetsResponseBody(TeaModel):
|
|
14767
15688
|
def __init__(
|
|
14768
15689
|
self,
|
|
14769
|
-
data: List[
|
|
15690
|
+
data: List[MLDataSetParam] = None,
|
|
14770
15691
|
total: int = None,
|
|
14771
15692
|
):
|
|
14772
15693
|
# The data returned.
|
|
@@ -14799,19 +15720,19 @@ class ListAnnotationDataResponseBody(TeaModel):
|
|
|
14799
15720
|
self.data = []
|
|
14800
15721
|
if m.get('data') is not None:
|
|
14801
15722
|
for k in m.get('data'):
|
|
14802
|
-
temp_model =
|
|
15723
|
+
temp_model = MLDataSetParam()
|
|
14803
15724
|
self.data.append(temp_model.from_map(k))
|
|
14804
15725
|
if m.get('total') is not None:
|
|
14805
15726
|
self.total = m.get('total')
|
|
14806
15727
|
return self
|
|
14807
15728
|
|
|
14808
15729
|
|
|
14809
|
-
class
|
|
15730
|
+
class ListAnnotationDataSetsResponse(TeaModel):
|
|
14810
15731
|
def __init__(
|
|
14811
15732
|
self,
|
|
14812
15733
|
headers: Dict[str, str] = None,
|
|
14813
15734
|
status_code: int = None,
|
|
14814
|
-
body:
|
|
15735
|
+
body: ListAnnotationDataSetsResponseBody = None,
|
|
14815
15736
|
):
|
|
14816
15737
|
self.headers = headers
|
|
14817
15738
|
self.status_code = status_code
|
|
@@ -14842,12 +15763,12 @@ class ListAnnotationDataResponse(TeaModel):
|
|
|
14842
15763
|
if m.get('statusCode') is not None:
|
|
14843
15764
|
self.status_code = m.get('statusCode')
|
|
14844
15765
|
if m.get('body') is not None:
|
|
14845
|
-
temp_model =
|
|
15766
|
+
temp_model = ListAnnotationDataSetsResponseBody()
|
|
14846
15767
|
self.body = temp_model.from_map(m['body'])
|
|
14847
15768
|
return self
|
|
14848
15769
|
|
|
14849
15770
|
|
|
14850
|
-
class
|
|
15771
|
+
class ListAnnotationLabelsRequest(TeaModel):
|
|
14851
15772
|
def __init__(
|
|
14852
15773
|
self,
|
|
14853
15774
|
offset: int = None,
|
|
@@ -14882,15 +15803,15 @@ class ListAnnotationDataSetsRequest(TeaModel):
|
|
|
14882
15803
|
return self
|
|
14883
15804
|
|
|
14884
15805
|
|
|
14885
|
-
class
|
|
15806
|
+
class ListAnnotationLabelsResponseBody(TeaModel):
|
|
14886
15807
|
def __init__(
|
|
14887
15808
|
self,
|
|
14888
|
-
data: List[
|
|
15809
|
+
data: List[MLLabelParam] = None,
|
|
14889
15810
|
total: int = None,
|
|
14890
15811
|
):
|
|
14891
15812
|
# The data returned.
|
|
14892
15813
|
self.data = data
|
|
14893
|
-
# The total number of
|
|
15814
|
+
# The total number of tags that meet the query conditions.
|
|
14894
15815
|
self.total = total
|
|
14895
15816
|
|
|
14896
15817
|
def validate(self):
|
|
@@ -14918,19 +15839,19 @@ class ListAnnotationDataSetsResponseBody(TeaModel):
|
|
|
14918
15839
|
self.data = []
|
|
14919
15840
|
if m.get('data') is not None:
|
|
14920
15841
|
for k in m.get('data'):
|
|
14921
|
-
temp_model =
|
|
15842
|
+
temp_model = MLLabelParam()
|
|
14922
15843
|
self.data.append(temp_model.from_map(k))
|
|
14923
15844
|
if m.get('total') is not None:
|
|
14924
15845
|
self.total = m.get('total')
|
|
14925
15846
|
return self
|
|
14926
15847
|
|
|
14927
15848
|
|
|
14928
|
-
class
|
|
15849
|
+
class ListAnnotationLabelsResponse(TeaModel):
|
|
14929
15850
|
def __init__(
|
|
14930
15851
|
self,
|
|
14931
15852
|
headers: Dict[str, str] = None,
|
|
14932
15853
|
status_code: int = None,
|
|
14933
|
-
body:
|
|
15854
|
+
body: ListAnnotationLabelsResponseBody = None,
|
|
14934
15855
|
):
|
|
14935
15856
|
self.headers = headers
|
|
14936
15857
|
self.status_code = status_code
|
|
@@ -14961,20 +15882,20 @@ class ListAnnotationDataSetsResponse(TeaModel):
|
|
|
14961
15882
|
if m.get('statusCode') is not None:
|
|
14962
15883
|
self.status_code = m.get('statusCode')
|
|
14963
15884
|
if m.get('body') is not None:
|
|
14964
|
-
temp_model =
|
|
15885
|
+
temp_model = ListAnnotationLabelsResponseBody()
|
|
14965
15886
|
self.body = temp_model.from_map(m['body'])
|
|
14966
15887
|
return self
|
|
14967
15888
|
|
|
14968
15889
|
|
|
14969
|
-
class
|
|
15890
|
+
class ListAzureBlobIngestionRequest(TeaModel):
|
|
14970
15891
|
def __init__(
|
|
14971
15892
|
self,
|
|
15893
|
+
logstore: str = None,
|
|
14972
15894
|
offset: int = None,
|
|
14973
15895
|
size: int = None,
|
|
14974
15896
|
):
|
|
14975
|
-
|
|
15897
|
+
self.logstore = logstore
|
|
14976
15898
|
self.offset = offset
|
|
14977
|
-
# The number of entries per page.
|
|
14978
15899
|
self.size = size
|
|
14979
15900
|
|
|
14980
15901
|
def validate(self):
|
|
@@ -14986,6 +15907,8 @@ class ListAnnotationLabelsRequest(TeaModel):
|
|
|
14986
15907
|
return _map
|
|
14987
15908
|
|
|
14988
15909
|
result = dict()
|
|
15910
|
+
if self.logstore is not None:
|
|
15911
|
+
result['logstore'] = self.logstore
|
|
14989
15912
|
if self.offset is not None:
|
|
14990
15913
|
result['offset'] = self.offset
|
|
14991
15914
|
if self.size is not None:
|
|
@@ -14994,6 +15917,8 @@ class ListAnnotationLabelsRequest(TeaModel):
|
|
|
14994
15917
|
|
|
14995
15918
|
def from_map(self, m: dict = None):
|
|
14996
15919
|
m = m or dict()
|
|
15920
|
+
if m.get('logstore') is not None:
|
|
15921
|
+
self.logstore = m.get('logstore')
|
|
14997
15922
|
if m.get('offset') is not None:
|
|
14998
15923
|
self.offset = m.get('offset')
|
|
14999
15924
|
if m.get('size') is not None:
|
|
@@ -15001,20 +15926,20 @@ class ListAnnotationLabelsRequest(TeaModel):
|
|
|
15001
15926
|
return self
|
|
15002
15927
|
|
|
15003
15928
|
|
|
15004
|
-
class
|
|
15929
|
+
class ListAzureBlobIngestionResponseBody(TeaModel):
|
|
15005
15930
|
def __init__(
|
|
15006
15931
|
self,
|
|
15007
|
-
|
|
15932
|
+
count: int = None,
|
|
15933
|
+
results: List[AzureBlobIngestion] = None,
|
|
15008
15934
|
total: int = None,
|
|
15009
15935
|
):
|
|
15010
|
-
|
|
15011
|
-
self.
|
|
15012
|
-
# The total number of tags that meet the query conditions.
|
|
15936
|
+
self.count = count
|
|
15937
|
+
self.results = results
|
|
15013
15938
|
self.total = total
|
|
15014
15939
|
|
|
15015
15940
|
def validate(self):
|
|
15016
|
-
if self.
|
|
15017
|
-
for k in self.
|
|
15941
|
+
if self.results:
|
|
15942
|
+
for k in self.results:
|
|
15018
15943
|
if k:
|
|
15019
15944
|
k.validate()
|
|
15020
15945
|
|
|
@@ -15024,32 +15949,36 @@ class ListAnnotationLabelsResponseBody(TeaModel):
|
|
|
15024
15949
|
return _map
|
|
15025
15950
|
|
|
15026
15951
|
result = dict()
|
|
15027
|
-
|
|
15028
|
-
|
|
15029
|
-
|
|
15030
|
-
|
|
15952
|
+
if self.count is not None:
|
|
15953
|
+
result['count'] = self.count
|
|
15954
|
+
result['results'] = []
|
|
15955
|
+
if self.results is not None:
|
|
15956
|
+
for k in self.results:
|
|
15957
|
+
result['results'].append(k.to_map() if k else None)
|
|
15031
15958
|
if self.total is not None:
|
|
15032
15959
|
result['total'] = self.total
|
|
15033
15960
|
return result
|
|
15034
15961
|
|
|
15035
15962
|
def from_map(self, m: dict = None):
|
|
15036
15963
|
m = m or dict()
|
|
15037
|
-
|
|
15038
|
-
|
|
15039
|
-
|
|
15040
|
-
|
|
15041
|
-
|
|
15964
|
+
if m.get('count') is not None:
|
|
15965
|
+
self.count = m.get('count')
|
|
15966
|
+
self.results = []
|
|
15967
|
+
if m.get('results') is not None:
|
|
15968
|
+
for k in m.get('results'):
|
|
15969
|
+
temp_model = AzureBlobIngestion()
|
|
15970
|
+
self.results.append(temp_model.from_map(k))
|
|
15042
15971
|
if m.get('total') is not None:
|
|
15043
15972
|
self.total = m.get('total')
|
|
15044
15973
|
return self
|
|
15045
15974
|
|
|
15046
15975
|
|
|
15047
|
-
class
|
|
15976
|
+
class ListAzureBlobIngestionResponse(TeaModel):
|
|
15048
15977
|
def __init__(
|
|
15049
15978
|
self,
|
|
15050
15979
|
headers: Dict[str, str] = None,
|
|
15051
15980
|
status_code: int = None,
|
|
15052
|
-
body:
|
|
15981
|
+
body: ListAzureBlobIngestionResponseBody = None,
|
|
15053
15982
|
):
|
|
15054
15983
|
self.headers = headers
|
|
15055
15984
|
self.status_code = status_code
|
|
@@ -15080,7 +16009,7 @@ class ListAnnotationLabelsResponse(TeaModel):
|
|
|
15080
16009
|
if m.get('statusCode') is not None:
|
|
15081
16010
|
self.status_code = m.get('statusCode')
|
|
15082
16011
|
if m.get('body') is not None:
|
|
15083
|
-
temp_model =
|
|
16012
|
+
temp_model = ListAzureBlobIngestionResponseBody()
|
|
15084
16013
|
self.body = temp_model.from_map(m['body'])
|
|
15085
16014
|
return self
|
|
15086
16015
|
|
|
@@ -16551,36 +17480,182 @@ class ListDownloadJobsResponseBodyResultsExecutionDetails(TeaModel):
|
|
|
16551
17480
|
return self
|
|
16552
17481
|
|
|
16553
17482
|
|
|
16554
|
-
class ListDownloadJobsResponseBodyResults(TeaModel):
|
|
17483
|
+
class ListDownloadJobsResponseBodyResults(TeaModel):
|
|
17484
|
+
def __init__(
|
|
17485
|
+
self,
|
|
17486
|
+
configuration: ListDownloadJobsResponseBodyResultsConfiguration = None,
|
|
17487
|
+
create_time: str = None,
|
|
17488
|
+
description: str = None,
|
|
17489
|
+
display_name: str = None,
|
|
17490
|
+
execution_details: ListDownloadJobsResponseBodyResultsExecutionDetails = None,
|
|
17491
|
+
name: str = None,
|
|
17492
|
+
status: str = None,
|
|
17493
|
+
):
|
|
17494
|
+
# 下载配置
|
|
17495
|
+
self.configuration = configuration
|
|
17496
|
+
self.create_time = create_time
|
|
17497
|
+
# 任务描述
|
|
17498
|
+
self.description = description
|
|
17499
|
+
# 任务显示名称
|
|
17500
|
+
self.display_name = display_name
|
|
17501
|
+
# The execution details.
|
|
17502
|
+
self.execution_details = execution_details
|
|
17503
|
+
# 代表资源名称的资源属性字段
|
|
17504
|
+
self.name = name
|
|
17505
|
+
# The task status.
|
|
17506
|
+
self.status = status
|
|
17507
|
+
|
|
17508
|
+
def validate(self):
|
|
17509
|
+
if self.configuration:
|
|
17510
|
+
self.configuration.validate()
|
|
17511
|
+
if self.execution_details:
|
|
17512
|
+
self.execution_details.validate()
|
|
17513
|
+
|
|
17514
|
+
def to_map(self):
|
|
17515
|
+
_map = super().to_map()
|
|
17516
|
+
if _map is not None:
|
|
17517
|
+
return _map
|
|
17518
|
+
|
|
17519
|
+
result = dict()
|
|
17520
|
+
if self.configuration is not None:
|
|
17521
|
+
result['configuration'] = self.configuration.to_map()
|
|
17522
|
+
if self.create_time is not None:
|
|
17523
|
+
result['createTime'] = self.create_time
|
|
17524
|
+
if self.description is not None:
|
|
17525
|
+
result['description'] = self.description
|
|
17526
|
+
if self.display_name is not None:
|
|
17527
|
+
result['displayName'] = self.display_name
|
|
17528
|
+
if self.execution_details is not None:
|
|
17529
|
+
result['executionDetails'] = self.execution_details.to_map()
|
|
17530
|
+
if self.name is not None:
|
|
17531
|
+
result['name'] = self.name
|
|
17532
|
+
if self.status is not None:
|
|
17533
|
+
result['status'] = self.status
|
|
17534
|
+
return result
|
|
17535
|
+
|
|
17536
|
+
def from_map(self, m: dict = None):
|
|
17537
|
+
m = m or dict()
|
|
17538
|
+
if m.get('configuration') is not None:
|
|
17539
|
+
temp_model = ListDownloadJobsResponseBodyResultsConfiguration()
|
|
17540
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
|
17541
|
+
if m.get('createTime') is not None:
|
|
17542
|
+
self.create_time = m.get('createTime')
|
|
17543
|
+
if m.get('description') is not None:
|
|
17544
|
+
self.description = m.get('description')
|
|
17545
|
+
if m.get('displayName') is not None:
|
|
17546
|
+
self.display_name = m.get('displayName')
|
|
17547
|
+
if m.get('executionDetails') is not None:
|
|
17548
|
+
temp_model = ListDownloadJobsResponseBodyResultsExecutionDetails()
|
|
17549
|
+
self.execution_details = temp_model.from_map(m['executionDetails'])
|
|
17550
|
+
if m.get('name') is not None:
|
|
17551
|
+
self.name = m.get('name')
|
|
17552
|
+
if m.get('status') is not None:
|
|
17553
|
+
self.status = m.get('status')
|
|
17554
|
+
return self
|
|
17555
|
+
|
|
17556
|
+
|
|
17557
|
+
class ListDownloadJobsResponseBody(TeaModel):
|
|
17558
|
+
def __init__(
|
|
17559
|
+
self,
|
|
17560
|
+
count: int = None,
|
|
17561
|
+
results: List[ListDownloadJobsResponseBodyResults] = None,
|
|
17562
|
+
total: int = None,
|
|
17563
|
+
):
|
|
17564
|
+
self.count = count
|
|
17565
|
+
# Array, to return a list of log download tasks.
|
|
17566
|
+
self.results = results
|
|
17567
|
+
self.total = total
|
|
17568
|
+
|
|
17569
|
+
def validate(self):
|
|
17570
|
+
if self.results:
|
|
17571
|
+
for k in self.results:
|
|
17572
|
+
if k:
|
|
17573
|
+
k.validate()
|
|
17574
|
+
|
|
17575
|
+
def to_map(self):
|
|
17576
|
+
_map = super().to_map()
|
|
17577
|
+
if _map is not None:
|
|
17578
|
+
return _map
|
|
17579
|
+
|
|
17580
|
+
result = dict()
|
|
17581
|
+
if self.count is not None:
|
|
17582
|
+
result['count'] = self.count
|
|
17583
|
+
result['results'] = []
|
|
17584
|
+
if self.results is not None:
|
|
17585
|
+
for k in self.results:
|
|
17586
|
+
result['results'].append(k.to_map() if k else None)
|
|
17587
|
+
if self.total is not None:
|
|
17588
|
+
result['total'] = self.total
|
|
17589
|
+
return result
|
|
17590
|
+
|
|
17591
|
+
def from_map(self, m: dict = None):
|
|
17592
|
+
m = m or dict()
|
|
17593
|
+
if m.get('count') is not None:
|
|
17594
|
+
self.count = m.get('count')
|
|
17595
|
+
self.results = []
|
|
17596
|
+
if m.get('results') is not None:
|
|
17597
|
+
for k in m.get('results'):
|
|
17598
|
+
temp_model = ListDownloadJobsResponseBodyResults()
|
|
17599
|
+
self.results.append(temp_model.from_map(k))
|
|
17600
|
+
if m.get('total') is not None:
|
|
17601
|
+
self.total = m.get('total')
|
|
17602
|
+
return self
|
|
17603
|
+
|
|
17604
|
+
|
|
17605
|
+
class ListDownloadJobsResponse(TeaModel):
|
|
17606
|
+
def __init__(
|
|
17607
|
+
self,
|
|
17608
|
+
headers: Dict[str, str] = None,
|
|
17609
|
+
status_code: int = None,
|
|
17610
|
+
body: ListDownloadJobsResponseBody = None,
|
|
17611
|
+
):
|
|
17612
|
+
self.headers = headers
|
|
17613
|
+
self.status_code = status_code
|
|
17614
|
+
self.body = body
|
|
17615
|
+
|
|
17616
|
+
def validate(self):
|
|
17617
|
+
if self.body:
|
|
17618
|
+
self.body.validate()
|
|
17619
|
+
|
|
17620
|
+
def to_map(self):
|
|
17621
|
+
_map = super().to_map()
|
|
17622
|
+
if _map is not None:
|
|
17623
|
+
return _map
|
|
17624
|
+
|
|
17625
|
+
result = dict()
|
|
17626
|
+
if self.headers is not None:
|
|
17627
|
+
result['headers'] = self.headers
|
|
17628
|
+
if self.status_code is not None:
|
|
17629
|
+
result['statusCode'] = self.status_code
|
|
17630
|
+
if self.body is not None:
|
|
17631
|
+
result['body'] = self.body.to_map()
|
|
17632
|
+
return result
|
|
17633
|
+
|
|
17634
|
+
def from_map(self, m: dict = None):
|
|
17635
|
+
m = m or dict()
|
|
17636
|
+
if m.get('headers') is not None:
|
|
17637
|
+
self.headers = m.get('headers')
|
|
17638
|
+
if m.get('statusCode') is not None:
|
|
17639
|
+
self.status_code = m.get('statusCode')
|
|
17640
|
+
if m.get('body') is not None:
|
|
17641
|
+
temp_model = ListDownloadJobsResponseBody()
|
|
17642
|
+
self.body = temp_model.from_map(m['body'])
|
|
17643
|
+
return self
|
|
17644
|
+
|
|
17645
|
+
|
|
17646
|
+
class ListETLsRequest(TeaModel):
|
|
16555
17647
|
def __init__(
|
|
16556
17648
|
self,
|
|
16557
|
-
|
|
16558
|
-
|
|
16559
|
-
|
|
16560
|
-
display_name: str = None,
|
|
16561
|
-
execution_details: ListDownloadJobsResponseBodyResultsExecutionDetails = None,
|
|
16562
|
-
name: str = None,
|
|
16563
|
-
status: str = None,
|
|
17649
|
+
logstore: str = None,
|
|
17650
|
+
offset: int = None,
|
|
17651
|
+
size: int = None,
|
|
16564
17652
|
):
|
|
16565
|
-
|
|
16566
|
-
self.
|
|
16567
|
-
self.
|
|
16568
|
-
# 任务描述
|
|
16569
|
-
self.description = description
|
|
16570
|
-
# 任务显示名称
|
|
16571
|
-
self.display_name = display_name
|
|
16572
|
-
# The execution details.
|
|
16573
|
-
self.execution_details = execution_details
|
|
16574
|
-
# 代表资源名称的资源属性字段
|
|
16575
|
-
self.name = name
|
|
16576
|
-
# The task status.
|
|
16577
|
-
self.status = status
|
|
17653
|
+
self.logstore = logstore
|
|
17654
|
+
self.offset = offset
|
|
17655
|
+
self.size = size
|
|
16578
17656
|
|
|
16579
17657
|
def validate(self):
|
|
16580
|
-
|
|
16581
|
-
self.configuration.validate()
|
|
16582
|
-
if self.execution_details:
|
|
16583
|
-
self.execution_details.validate()
|
|
17658
|
+
pass
|
|
16584
17659
|
|
|
16585
17660
|
def to_map(self):
|
|
16586
17661
|
_map = super().to_map()
|
|
@@ -16588,53 +17663,36 @@ class ListDownloadJobsResponseBodyResults(TeaModel):
|
|
|
16588
17663
|
return _map
|
|
16589
17664
|
|
|
16590
17665
|
result = dict()
|
|
16591
|
-
if self.
|
|
16592
|
-
result['
|
|
16593
|
-
if self.
|
|
16594
|
-
result['
|
|
16595
|
-
if self.
|
|
16596
|
-
result['
|
|
16597
|
-
if self.display_name is not None:
|
|
16598
|
-
result['displayName'] = self.display_name
|
|
16599
|
-
if self.execution_details is not None:
|
|
16600
|
-
result['executionDetails'] = self.execution_details.to_map()
|
|
16601
|
-
if self.name is not None:
|
|
16602
|
-
result['name'] = self.name
|
|
16603
|
-
if self.status is not None:
|
|
16604
|
-
result['status'] = self.status
|
|
17666
|
+
if self.logstore is not None:
|
|
17667
|
+
result['logstore'] = self.logstore
|
|
17668
|
+
if self.offset is not None:
|
|
17669
|
+
result['offset'] = self.offset
|
|
17670
|
+
if self.size is not None:
|
|
17671
|
+
result['size'] = self.size
|
|
16605
17672
|
return result
|
|
16606
17673
|
|
|
16607
17674
|
def from_map(self, m: dict = None):
|
|
16608
17675
|
m = m or dict()
|
|
16609
|
-
if m.get('
|
|
16610
|
-
|
|
16611
|
-
|
|
16612
|
-
|
|
16613
|
-
|
|
16614
|
-
|
|
16615
|
-
self.description = m.get('description')
|
|
16616
|
-
if m.get('displayName') is not None:
|
|
16617
|
-
self.display_name = m.get('displayName')
|
|
16618
|
-
if m.get('executionDetails') is not None:
|
|
16619
|
-
temp_model = ListDownloadJobsResponseBodyResultsExecutionDetails()
|
|
16620
|
-
self.execution_details = temp_model.from_map(m['executionDetails'])
|
|
16621
|
-
if m.get('name') is not None:
|
|
16622
|
-
self.name = m.get('name')
|
|
16623
|
-
if m.get('status') is not None:
|
|
16624
|
-
self.status = m.get('status')
|
|
17676
|
+
if m.get('logstore') is not None:
|
|
17677
|
+
self.logstore = m.get('logstore')
|
|
17678
|
+
if m.get('offset') is not None:
|
|
17679
|
+
self.offset = m.get('offset')
|
|
17680
|
+
if m.get('size') is not None:
|
|
17681
|
+
self.size = m.get('size')
|
|
16625
17682
|
return self
|
|
16626
17683
|
|
|
16627
17684
|
|
|
16628
|
-
class
|
|
17685
|
+
class ListETLsResponseBody(TeaModel):
|
|
16629
17686
|
def __init__(
|
|
16630
17687
|
self,
|
|
16631
17688
|
count: int = None,
|
|
16632
|
-
results: List[
|
|
17689
|
+
results: List[ETL] = None,
|
|
16633
17690
|
total: int = None,
|
|
16634
17691
|
):
|
|
17692
|
+
# The number of data transformation jobs that are returned.
|
|
16635
17693
|
self.count = count
|
|
16636
|
-
# Array, to return a list of log download tasks.
|
|
16637
17694
|
self.results = results
|
|
17695
|
+
# The total number of data transformation jobs in the project.
|
|
16638
17696
|
self.total = total
|
|
16639
17697
|
|
|
16640
17698
|
def validate(self):
|
|
@@ -16666,19 +17724,19 @@ class ListDownloadJobsResponseBody(TeaModel):
|
|
|
16666
17724
|
self.results = []
|
|
16667
17725
|
if m.get('results') is not None:
|
|
16668
17726
|
for k in m.get('results'):
|
|
16669
|
-
temp_model =
|
|
17727
|
+
temp_model = ETL()
|
|
16670
17728
|
self.results.append(temp_model.from_map(k))
|
|
16671
17729
|
if m.get('total') is not None:
|
|
16672
17730
|
self.total = m.get('total')
|
|
16673
17731
|
return self
|
|
16674
17732
|
|
|
16675
17733
|
|
|
16676
|
-
class
|
|
17734
|
+
class ListETLsResponse(TeaModel):
|
|
16677
17735
|
def __init__(
|
|
16678
17736
|
self,
|
|
16679
17737
|
headers: Dict[str, str] = None,
|
|
16680
17738
|
status_code: int = None,
|
|
16681
|
-
body:
|
|
17739
|
+
body: ListETLsResponseBody = None,
|
|
16682
17740
|
):
|
|
16683
17741
|
self.headers = headers
|
|
16684
17742
|
self.status_code = status_code
|
|
@@ -16709,12 +17767,12 @@ class ListDownloadJobsResponse(TeaModel):
|
|
|
16709
17767
|
if m.get('statusCode') is not None:
|
|
16710
17768
|
self.status_code = m.get('statusCode')
|
|
16711
17769
|
if m.get('body') is not None:
|
|
16712
|
-
temp_model =
|
|
17770
|
+
temp_model = ListETLsResponseBody()
|
|
16713
17771
|
self.body = temp_model.from_map(m['body'])
|
|
16714
17772
|
return self
|
|
16715
17773
|
|
|
16716
17774
|
|
|
16717
|
-
class
|
|
17775
|
+
class ListElasticsearchIngestionsRequest(TeaModel):
|
|
16718
17776
|
def __init__(
|
|
16719
17777
|
self,
|
|
16720
17778
|
logstore: str = None,
|
|
@@ -16753,17 +17811,15 @@ class ListETLsRequest(TeaModel):
|
|
|
16753
17811
|
return self
|
|
16754
17812
|
|
|
16755
17813
|
|
|
16756
|
-
class
|
|
17814
|
+
class ListElasticsearchIngestionsResponseBody(TeaModel):
|
|
16757
17815
|
def __init__(
|
|
16758
17816
|
self,
|
|
16759
17817
|
count: int = None,
|
|
16760
|
-
results: List[
|
|
17818
|
+
results: List[ESIngestion] = None,
|
|
16761
17819
|
total: int = None,
|
|
16762
17820
|
):
|
|
16763
|
-
# The number of data transformation jobs that are returned.
|
|
16764
17821
|
self.count = count
|
|
16765
17822
|
self.results = results
|
|
16766
|
-
# The total number of data transformation jobs in the project.
|
|
16767
17823
|
self.total = total
|
|
16768
17824
|
|
|
16769
17825
|
def validate(self):
|
|
@@ -16795,19 +17851,19 @@ class ListETLsResponseBody(TeaModel):
|
|
|
16795
17851
|
self.results = []
|
|
16796
17852
|
if m.get('results') is not None:
|
|
16797
17853
|
for k in m.get('results'):
|
|
16798
|
-
temp_model =
|
|
17854
|
+
temp_model = ESIngestion()
|
|
16799
17855
|
self.results.append(temp_model.from_map(k))
|
|
16800
17856
|
if m.get('total') is not None:
|
|
16801
17857
|
self.total = m.get('total')
|
|
16802
17858
|
return self
|
|
16803
17859
|
|
|
16804
17860
|
|
|
16805
|
-
class
|
|
17861
|
+
class ListElasticsearchIngestionsResponse(TeaModel):
|
|
16806
17862
|
def __init__(
|
|
16807
17863
|
self,
|
|
16808
17864
|
headers: Dict[str, str] = None,
|
|
16809
17865
|
status_code: int = None,
|
|
16810
|
-
body:
|
|
17866
|
+
body: ListElasticsearchIngestionsResponseBody = None,
|
|
16811
17867
|
):
|
|
16812
17868
|
self.headers = headers
|
|
16813
17869
|
self.status_code = status_code
|
|
@@ -16838,7 +17894,7 @@ class ListETLsResponse(TeaModel):
|
|
|
16838
17894
|
if m.get('statusCode') is not None:
|
|
16839
17895
|
self.status_code = m.get('statusCode')
|
|
16840
17896
|
if m.get('body') is not None:
|
|
16841
|
-
temp_model =
|
|
17897
|
+
temp_model = ListElasticsearchIngestionsResponseBody()
|
|
16842
17898
|
self.body = temp_model.from_map(m['body'])
|
|
16843
17899
|
return self
|
|
16844
17900
|
|
|
@@ -18166,12 +19222,14 @@ class ListOSSIngestionsResponse(TeaModel):
 class ListProjectRequest(TeaModel):
     def __init__(
         self,
+        description: str = None,
         fetch_quota: bool = None,
         offset: int = None,
         project_name: str = None,
         resource_group_id: str = None,
         size: int = None,
     ):
+        self.description = description
         self.fetch_quota = fetch_quota
         # The line from which the query starts. Default value: 0.
         self.offset = offset
@@ -18190,6 +19248,8 @@ class ListProjectRequest(TeaModel):
             return _map
 
         result = dict()
+        if self.description is not None:
+            result['description'] = self.description
         if self.fetch_quota is not None:
             result['fetchQuota'] = self.fetch_quota
         if self.offset is not None:
@@ -18204,6 +19264,8 @@ class ListProjectRequest(TeaModel):
 
     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('description') is not None:
+            self.description = m.get('description')
         if m.get('fetchQuota') is not None:
             self.fetch_quota = m.get('fetchQuota')
         if m.get('offset') is not None:
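ListProjectRequest gains a `description` filter next to the existing projectName/resourceGroupId filters. A minimal sketch; the client call is only hinted at in a comment because client.py is not part of this excerpt.

```python
from alibabacloud_sls20201230 import models as sls_models

request = sls_models.ListProjectRequest(
    description='payment',  # new in 5.10.0: filter projects by description
    offset=0,
    size=100,
)
print(request.to_map())
# -> {'description': 'payment', 'offset': 0, 'size': 100}

# Typically passed to the generated client (assumed call):
# client.list_project(request)
```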
@@ -19980,25 +21042,178 @@ class RefreshTokenResponseBody(TeaModel):
|
|
|
19980
21042
|
|
|
19981
21043
|
def from_map(self, m: dict = None):
|
|
19982
21044
|
m = m or dict()
|
|
19983
|
-
if m.get('accessToken') is not None:
|
|
19984
|
-
self.access_token = m.get('accessToken')
|
|
21045
|
+
if m.get('accessToken') is not None:
|
|
21046
|
+
self.access_token = m.get('accessToken')
|
|
21047
|
+
return self
|
|
21048
|
+
|
|
21049
|
+
|
|
21050
|
+
class RefreshTokenResponse(TeaModel):
|
|
21051
|
+
def __init__(
|
|
21052
|
+
self,
|
|
21053
|
+
headers: Dict[str, str] = None,
|
|
21054
|
+
status_code: int = None,
|
|
21055
|
+
body: RefreshTokenResponseBody = None,
|
|
21056
|
+
):
|
|
21057
|
+
self.headers = headers
|
|
21058
|
+
self.status_code = status_code
|
|
21059
|
+
self.body = body
|
|
21060
|
+
|
|
21061
|
+
def validate(self):
|
|
21062
|
+
if self.body:
|
|
21063
|
+
self.body.validate()
|
|
21064
|
+
|
|
21065
|
+
def to_map(self):
|
|
21066
|
+
_map = super().to_map()
|
|
21067
|
+
if _map is not None:
|
|
21068
|
+
return _map
|
|
21069
|
+
|
|
21070
|
+
result = dict()
|
|
21071
|
+
if self.headers is not None:
|
|
21072
|
+
result['headers'] = self.headers
|
|
21073
|
+
if self.status_code is not None:
|
|
21074
|
+
result['statusCode'] = self.status_code
|
|
21075
|
+
if self.body is not None:
|
|
21076
|
+
result['body'] = self.body.to_map()
|
|
21077
|
+
return result
|
|
21078
|
+
|
|
21079
|
+
def from_map(self, m: dict = None):
|
|
21080
|
+
m = m or dict()
|
|
21081
|
+
if m.get('headers') is not None:
|
|
21082
|
+
self.headers = m.get('headers')
|
|
21083
|
+
if m.get('statusCode') is not None:
|
|
21084
|
+
self.status_code = m.get('statusCode')
|
|
21085
|
+
if m.get('body') is not None:
|
|
21086
|
+
temp_model = RefreshTokenResponseBody()
|
|
21087
|
+
self.body = temp_model.from_map(m['body'])
|
|
21088
|
+
return self
|
|
21089
|
+
|
|
21090
|
+
|
|
21091
|
+
class RemoveConfigFromMachineGroupResponse(TeaModel):
|
|
21092
|
+
def __init__(
|
|
21093
|
+
self,
|
|
21094
|
+
headers: Dict[str, str] = None,
|
|
21095
|
+
status_code: int = None,
|
|
21096
|
+
):
|
|
21097
|
+
self.headers = headers
|
|
21098
|
+
self.status_code = status_code
|
|
21099
|
+
|
|
21100
|
+
def validate(self):
|
|
21101
|
+
pass
|
|
21102
|
+
|
|
21103
|
+
def to_map(self):
|
|
21104
|
+
_map = super().to_map()
|
|
21105
|
+
if _map is not None:
|
|
21106
|
+
return _map
|
|
21107
|
+
|
|
21108
|
+
result = dict()
|
|
21109
|
+
if self.headers is not None:
|
|
21110
|
+
result['headers'] = self.headers
|
|
21111
|
+
if self.status_code is not None:
|
|
21112
|
+
result['statusCode'] = self.status_code
|
|
21113
|
+
return result
|
|
21114
|
+
|
|
21115
|
+
def from_map(self, m: dict = None):
|
|
21116
|
+
m = m or dict()
|
|
21117
|
+
if m.get('headers') is not None:
|
|
21118
|
+
self.headers = m.get('headers')
|
|
21119
|
+
if m.get('statusCode') is not None:
|
|
21120
|
+
self.status_code = m.get('statusCode')
|
|
21121
|
+
return self
|
|
21122
|
+
|
|
21123
|
+
|
|
21124
|
+
class SplitShardRequest(TeaModel):
|
|
21125
|
+
def __init__(
|
|
21126
|
+
self,
|
|
21127
|
+
key: str = None,
|
|
21128
|
+
shard_count: int = None,
|
|
21129
|
+
):
|
|
21130
|
+
# The position where the shard is split.
|
|
21131
|
+
self.key = key
|
|
21132
|
+
# The number of new shards that are generated after splitting.
|
|
21133
|
+
self.shard_count = shard_count
|
|
21134
|
+
|
|
21135
|
+
def validate(self):
|
|
21136
|
+
pass
|
|
21137
|
+
|
|
21138
|
+
def to_map(self):
|
|
21139
|
+
_map = super().to_map()
|
|
21140
|
+
if _map is not None:
|
|
21141
|
+
return _map
|
|
21142
|
+
|
|
21143
|
+
result = dict()
|
|
21144
|
+
if self.key is not None:
|
|
21145
|
+
result['key'] = self.key
|
|
21146
|
+
if self.shard_count is not None:
|
|
21147
|
+
result['shardCount'] = self.shard_count
|
|
21148
|
+
return result
|
|
21149
|
+
|
|
21150
|
+
def from_map(self, m: dict = None):
|
|
21151
|
+
m = m or dict()
|
|
21152
|
+
if m.get('key') is not None:
|
|
21153
|
+
self.key = m.get('key')
|
|
21154
|
+
if m.get('shardCount') is not None:
|
|
21155
|
+
self.shard_count = m.get('shardCount')
|
|
21156
|
+
return self
|
|
21157
|
+
|
|
21158
|
+
|
|
21159
|
+
class SplitShardResponse(TeaModel):
|
|
21160
|
+
def __init__(
|
|
21161
|
+
self,
|
|
21162
|
+
headers: Dict[str, str] = None,
|
|
21163
|
+
status_code: int = None,
|
|
21164
|
+
body: List[Shard] = None,
|
|
21165
|
+
):
|
|
21166
|
+
self.headers = headers
|
|
21167
|
+
self.status_code = status_code
|
|
21168
|
+
self.body = body
|
|
21169
|
+
|
|
21170
|
+
def validate(self):
|
|
21171
|
+
if self.body:
|
|
21172
|
+
for k in self.body:
|
|
21173
|
+
if k:
|
|
21174
|
+
k.validate()
|
|
21175
|
+
|
|
21176
|
+
def to_map(self):
|
|
21177
|
+
_map = super().to_map()
|
|
21178
|
+
if _map is not None:
|
|
21179
|
+
return _map
|
|
21180
|
+
|
|
21181
|
+
result = dict()
|
|
21182
|
+
if self.headers is not None:
|
|
21183
|
+
result['headers'] = self.headers
|
|
21184
|
+
if self.status_code is not None:
|
|
21185
|
+
result['statusCode'] = self.status_code
|
|
21186
|
+
result['body'] = []
|
|
21187
|
+
if self.body is not None:
|
|
21188
|
+
for k in self.body:
|
|
21189
|
+
result['body'].append(k.to_map() if k else None)
|
|
21190
|
+
return result
|
|
21191
|
+
|
|
21192
|
+
def from_map(self, m: dict = None):
|
|
21193
|
+
m = m or dict()
|
|
21194
|
+
if m.get('headers') is not None:
|
|
21195
|
+
self.headers = m.get('headers')
|
|
21196
|
+
if m.get('statusCode') is not None:
|
|
21197
|
+
self.status_code = m.get('statusCode')
|
|
21198
|
+
self.body = []
|
|
21199
|
+
if m.get('body') is not None:
|
|
21200
|
+
for k in m.get('body'):
|
|
21201
|
+
temp_model = Shard()
|
|
21202
|
+
self.body.append(temp_model.from_map(k))
|
|
19985
21203
|
return self
|
|
19986
21204
|
|
|
19987
21205
|
|
|
19988
|
-
class
|
|
21206
|
+
class StartAzureBlobIngestionResponse(TeaModel):
|
|
19989
21207
|
def __init__(
|
|
19990
21208
|
self,
|
|
19991
21209
|
headers: Dict[str, str] = None,
|
|
19992
21210
|
status_code: int = None,
|
|
19993
|
-
body: RefreshTokenResponseBody = None,
|
|
19994
21211
|
):
|
|
19995
21212
|
self.headers = headers
|
|
19996
21213
|
self.status_code = status_code
|
|
19997
|
-
self.body = body
|
|
19998
21214
|
|
|
19999
21215
|
def validate(self):
|
|
20000
|
-
|
|
20001
|
-
self.body.validate()
|
|
21216
|
+
pass
|
|
20002
21217
|
|
|
20003
21218
|
def to_map(self):
|
|
20004
21219
|
_map = super().to_map()
|
|
@@ -20010,8 +21225,6 @@ class RefreshTokenResponse(TeaModel):
|
|
|
20010
21225
|
result['headers'] = self.headers
|
|
20011
21226
|
if self.status_code is not None:
|
|
20012
21227
|
result['statusCode'] = self.status_code
|
|
20013
|
-
if self.body is not None:
|
|
20014
|
-
result['body'] = self.body.to_map()
|
|
20015
21228
|
return result
|
|
20016
21229
|
|
|
20017
21230
|
def from_map(self, m: dict = None):
|
|
@@ -20020,13 +21233,10 @@ class RefreshTokenResponse(TeaModel):
|
|
|
20020
21233
|
self.headers = m.get('headers')
|
|
20021
21234
|
if m.get('statusCode') is not None:
|
|
20022
21235
|
self.status_code = m.get('statusCode')
|
|
20023
|
-
if m.get('body') is not None:
|
|
20024
|
-
temp_model = RefreshTokenResponseBody()
|
|
20025
|
-
self.body = temp_model.from_map(m['body'])
|
|
20026
21236
|
return self
|
|
20027
21237
|
|
|
20028
21238
|
|
|
20029
|
-
class
|
|
21239
|
+
class StartETLResponse(TeaModel):
|
|
20030
21240
|
def __init__(
|
|
20031
21241
|
self,
|
|
20032
21242
|
headers: Dict[str, str] = None,
|
|
@@ -20059,16 +21269,14 @@ class RemoveConfigFromMachineGroupResponse(TeaModel):
|
|
|
20059
21269
|
return self
|
|
20060
21270
|
|
|
20061
21271
|
|
|
20062
|
-
class
|
|
21272
|
+
class StartElasticsearchIngestionResponse(TeaModel):
|
|
20063
21273
|
def __init__(
|
|
20064
21274
|
self,
|
|
20065
|
-
|
|
20066
|
-
|
|
21275
|
+
headers: Dict[str, str] = None,
|
|
21276
|
+
status_code: int = None,
|
|
20067
21277
|
):
|
|
20068
|
-
|
|
20069
|
-
self.
|
|
20070
|
-
# The number of new shards that are generated after splitting.
|
|
20071
|
-
self.shard_count = shard_count
|
|
21278
|
+
self.headers = headers
|
|
21279
|
+
self.status_code = status_code
|
|
20072
21280
|
|
|
20073
21281
|
def validate(self):
|
|
20074
21282
|
pass
|
|
@@ -20079,37 +21287,32 @@ class SplitShardRequest(TeaModel):
|
|
|
20079
21287
|
return _map
|
|
20080
21288
|
|
|
20081
21289
|
result = dict()
|
|
20082
|
-
if self.
|
|
20083
|
-
result['
|
|
20084
|
-
if self.
|
|
20085
|
-
result['
|
|
21290
|
+
if self.headers is not None:
|
|
21291
|
+
result['headers'] = self.headers
|
|
21292
|
+
if self.status_code is not None:
|
|
21293
|
+
result['statusCode'] = self.status_code
|
|
20086
21294
|
return result
|
|
20087
21295
|
|
|
20088
21296
|
def from_map(self, m: dict = None):
|
|
20089
21297
|
m = m or dict()
|
|
20090
|
-
if m.get('
|
|
20091
|
-
self.
|
|
20092
|
-
if m.get('
|
|
20093
|
-
self.
|
|
21298
|
+
if m.get('headers') is not None:
|
|
21299
|
+
self.headers = m.get('headers')
|
|
21300
|
+
if m.get('statusCode') is not None:
|
|
21301
|
+
self.status_code = m.get('statusCode')
|
|
20094
21302
|
return self
|
|
20095
21303
|
|
|
20096
21304
|
|
|
20097
|
-
class
|
|
21305
|
+
class StartMaxComputeExportResponse(TeaModel):
|
|
20098
21306
|
def __init__(
|
|
20099
21307
|
self,
|
|
20100
21308
|
headers: Dict[str, str] = None,
|
|
20101
21309
|
status_code: int = None,
|
|
20102
|
-
body: List[Shard] = None,
|
|
20103
21310
|
):
|
|
20104
21311
|
self.headers = headers
|
|
20105
21312
|
self.status_code = status_code
|
|
20106
|
-
self.body = body
|
|
20107
21313
|
|
|
20108
21314
|
def validate(self):
|
|
20109
|
-
|
|
20110
|
-
for k in self.body:
|
|
20111
|
-
if k:
|
|
20112
|
-
k.validate()
|
|
21315
|
+
pass
|
|
20113
21316
|
|
|
20114
21317
|
def to_map(self):
|
|
20115
21318
|
_map = super().to_map()
|
|
@@ -20121,10 +21324,6 @@ class SplitShardResponse(TeaModel):
|
|
|
20121
21324
|
result['headers'] = self.headers
|
|
20122
21325
|
if self.status_code is not None:
|
|
20123
21326
|
result['statusCode'] = self.status_code
|
|
20124
|
-
result['body'] = []
|
|
20125
|
-
if self.body is not None:
|
|
20126
|
-
for k in self.body:
|
|
20127
|
-
result['body'].append(k.to_map() if k else None)
|
|
20128
21327
|
return result
|
|
20129
21328
|
|
|
20130
21329
|
def from_map(self, m: dict = None):
|
|
@@ -20133,15 +21332,10 @@ class SplitShardResponse(TeaModel):
|
|
|
20133
21332
|
self.headers = m.get('headers')
|
|
20134
21333
|
if m.get('statusCode') is not None:
|
|
20135
21334
|
self.status_code = m.get('statusCode')
|
|
20136
|
-
self.body = []
|
|
20137
|
-
if m.get('body') is not None:
|
|
20138
|
-
for k in m.get('body'):
|
|
20139
|
-
temp_model = Shard()
|
|
20140
|
-
self.body.append(temp_model.from_map(k))
|
|
20141
21335
|
return self
|
|
20142
21336
|
|
|
20143
21337
|
|
|
20144
|
-
class
|
|
21338
|
+
class StartOSSExportResponse(TeaModel):
|
|
20145
21339
|
def __init__(
|
|
20146
21340
|
self,
|
|
20147
21341
|
headers: Dict[str, str] = None,
|
|
@@ -20174,7 +21368,7 @@ class StartETLResponse(TeaModel):
|
|
|
20174
21368
|
return self
|
|
20175
21369
|
|
|
20176
21370
|
|
|
20177
|
-
class
|
|
21371
|
+
class StartOSSHDFSExportResponse(TeaModel):
|
|
20178
21372
|
def __init__(
|
|
20179
21373
|
self,
|
|
20180
21374
|
headers: Dict[str, str] = None,
|
|
@@ -20207,7 +21401,7 @@ class StartMaxComputeExportResponse(TeaModel):
|
|
|
20207
21401
|
return self
|
|
20208
21402
|
|
|
20209
21403
|
|
|
20210
|
-
class
|
|
21404
|
+
class StartOSSIngestionResponse(TeaModel):
|
|
20211
21405
|
def __init__(
|
|
20212
21406
|
self,
|
|
20213
21407
|
headers: Dict[str, str] = None,
|
|
@@ -20240,7 +21434,7 @@ class StartOSSExportResponse(TeaModel):
|
|
|
20240
21434
|
return self
|
|
20241
21435
|
|
|
20242
21436
|
|
|
20243
|
-
class
|
|
21437
|
+
class StopAzureBlobIngestionResponse(TeaModel):
|
|
20244
21438
|
def __init__(
|
|
20245
21439
|
self,
|
|
20246
21440
|
headers: Dict[str, str] = None,
|
|
@@ -20273,7 +21467,7 @@ class StartOSSHDFSExportResponse(TeaModel):
|
|
|
20273
21467
|
return self
|
|
20274
21468
|
|
|
20275
21469
|
|
|
20276
|
-
class
|
|
21470
|
+
class StopETLResponse(TeaModel):
|
|
20277
21471
|
def __init__(
|
|
20278
21472
|
self,
|
|
20279
21473
|
headers: Dict[str, str] = None,
|
|
@@ -20306,7 +21500,7 @@ class StartOSSIngestionResponse(TeaModel):
|
|
|
20306
21500
|
return self
|
|
20307
21501
|
|
|
20308
21502
|
|
|
20309
|
-
class
|
|
21503
|
+
class StopElasticsearchIngestionResponse(TeaModel):
|
|
20310
21504
|
def __init__(
|
|
20311
21505
|
self,
|
|
20312
21506
|
headers: Dict[str, str] = None,
|
|
@@ -21053,6 +22247,91 @@ class UpdateAnnotationLabelResponse(TeaModel):
         return self


+class UpdateAzureBlobIngestionRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: AzureBlobIngestionConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        schedule: Schedule = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.schedule = schedule
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = AzureBlobIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        return self
+
+
+class UpdateAzureBlobIngestionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class UpdateConfigRequest(TeaModel):
     def __init__(
         self,
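A minimal usage sketch (not part of the package diff) of the new UpdateAzureBlobIngestionRequest model follows. Only the top-level keys ('configuration', 'description', 'displayName', 'schedule') and the required-field comments come from the generated code above; the `sls_models` alias and the nested payload contents are illustrative placeholders, and the client call that would send the request is not shown in this section.

```python
# Illustrative sketch, not part of the diff: round-tripping the new
# UpdateAzureBlobIngestionRequest through to_map()/from_map().
# Nested configuration/schedule contents are hypothetical placeholders.
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateAzureBlobIngestionRequest(
    # required per the generated comment; its fields are defined elsewhere in models.py
    configuration=sls_models.AzureBlobIngestionConfiguration(),
    display_name='azure-blob-ingest',   # required per the generated comment
    description='sync blobs into SLS',
    schedule=sls_models.Schedule(),
)
wire = req.to_map()   # serialized under 'configuration', 'description', 'displayName', 'schedule'

# from_map() rebuilds the nested models via their own from_map() helpers.
restored = sls_models.UpdateAzureBlobIngestionRequest().from_map(wire)
assert isinstance(restored.configuration, sls_models.AzureBlobIngestionConfiguration)
assert restored.display_name == 'azure-blob-ingest'
```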
@@ -21366,6 +22645,91 @@ class UpdateETLResponse(TeaModel):
         return self


+class UpdateElasticsearchIngestionRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: ESIngestionConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        schedule: Schedule = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.schedule = schedule
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = ESIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        return self
+
+
+class UpdateElasticsearchIngestionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class UpdateIndexRequest(TeaModel):
     def __init__(
         self,
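The Elasticsearch variant mirrors the Azure Blob request above; a short sketch (not part of the diff) of its from_map() path, with a hypothetical placeholder payload:

```python
# Illustrative sketch, not part of the diff: from_map() rebuilds the nested
# models via ESIngestionConfiguration.from_map() and Schedule.from_map().
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateElasticsearchIngestionRequest().from_map({
    'configuration': {},            # ES source settings would go here
    'displayName': 'es-ingest',     # required per the generated comment
    'schedule': {},
})
assert isinstance(req.configuration, sls_models.ESIngestionConfiguration)
assert isinstance(req.schedule, sls_models.Schedule)
# req.validate() cascades into configuration.validate() and schedule.validate()
# when those members are set, as shown in the generated code above.
```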
@@ -21442,6 +22806,7 @@ class UpdateLogStoreRequest(TeaModel):
         max_split_shard: int = None,
         mode: str = None,
         shard_count: int = None,
+        sharding_policy: ShardingPolicy = None,
         telemetry_type: str = None,
         ttl: int = None,
     ):
@@ -21483,6 +22848,7 @@ class UpdateLogStoreRequest(TeaModel):
         #
         # > You cannot call the UpdateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
         self.shard_count = shard_count
+        self.sharding_policy = sharding_policy
         # The type of the observable data. Valid values:
         #
         # * None (default): log data.
@@ -21496,6 +22862,8 @@ class UpdateLogStoreRequest(TeaModel):
     def validate(self):
         if self.encrypt_conf:
             self.encrypt_conf.validate()
+        if self.sharding_policy:
+            self.sharding_policy.validate()

     def to_map(self):
         _map = super().to_map()
@@ -21523,6 +22891,8 @@ class UpdateLogStoreRequest(TeaModel):
             result['mode'] = self.mode
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.telemetry_type is not None:
             result['telemetryType'] = self.telemetry_type
         if self.ttl is not None:
@@ -21552,6 +22922,9 @@ class UpdateLogStoreRequest(TeaModel):
             self.mode = m.get('mode')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('telemetryType') is not None:
             self.telemetry_type = m.get('telemetryType')
         if m.get('ttl') is not None:
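UpdateLogStoreRequest now carries an optional nested ShardingPolicy, serialized under 'shardingPolicy'. A minimal round-trip sketch (not part of the diff) follows; ShardingPolicy's own fields are not shown in this section, so an empty instance is used as a placeholder.

```python
# Illustrative sketch, not part of the diff: the new sharding_policy field
# on UpdateLogStoreRequest, serialized as a nested model under 'shardingPolicy'.
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateLogStoreRequest(
    ttl=30,
    sharding_policy=sls_models.ShardingPolicy(),   # fields not shown in this diff
)
wire = req.to_map()                                # {'ttl': 30, 'shardingPolicy': {...}}
restored = sls_models.UpdateLogStoreRequest().from_map(wire)
assert isinstance(restored.sharding_policy, sls_models.ShardingPolicy)
assert restored.ttl == 30
```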
@@ -22395,13 +23768,16 @@ class UpdateMaxComputeExportResponse(TeaModel):
 class UpdateMetricStoreRequest(TeaModel):
     def __init__(
         self,
+        append_meta: bool = None,
         auto_split: bool = None,
         hot_ttl: int = None,
         infrequent_access_ttl: int = None,
         max_split_shard: int = None,
         mode: str = None,
+        sharding_policy: ShardingPolicy = None,
         ttl: int = None,
     ):
+        self.append_meta = append_meta
         # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
         self.hot_ttl = hot_ttl
@@ -22410,11 +23786,13 @@ class UpdateMetricStoreRequest(TeaModel):
         self.max_split_shard = max_split_shard
         # The type of the Metricstore.
         self.mode = mode
+        self.sharding_policy = sharding_policy
         # The retention period of the metric data. Unit: days.
         self.ttl = ttl

     def validate(self):
-        pass
+        if self.sharding_policy:
+            self.sharding_policy.validate()

     def to_map(self):
         _map = super().to_map()
@@ -22422,6 +23800,8 @@ class UpdateMetricStoreRequest(TeaModel):
             return _map

         result = dict()
+        if self.append_meta is not None:
+            result['appendMeta'] = self.append_meta
         if self.auto_split is not None:
             result['autoSplit'] = self.auto_split
         if self.hot_ttl is not None:
@@ -22432,12 +23812,16 @@ class UpdateMetricStoreRequest(TeaModel):
             result['maxSplitShard'] = self.max_split_shard
         if self.mode is not None:
             result['mode'] = self.mode
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.ttl is not None:
             result['ttl'] = self.ttl
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('appendMeta') is not None:
+            self.append_meta = m.get('appendMeta')
         if m.get('autoSplit') is not None:
             self.auto_split = m.get('autoSplit')
         if m.get('hot_ttl') is not None:
@@ -22448,6 +23832,9 @@ class UpdateMetricStoreRequest(TeaModel):
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('mode') is not None:
             self.mode = m.get('mode')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('ttl') is not None:
             self.ttl = m.get('ttl')
         return self
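UpdateMetricStoreRequest gains append_meta and sharding_policy, serialized as 'appendMeta' and 'shardingPolicy'. A short sketch (not part of the diff) of the new fields' round trip; ShardingPolicy is again left empty because its fields are not part of this section.

```python
# Illustrative sketch, not part of the diff: exercising the new append_meta
# and sharding_policy fields on UpdateMetricStoreRequest.
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateMetricStoreRequest(
    append_meta=True,
    sharding_policy=sls_models.ShardingPolicy(),   # fields not shown in this diff
    ttl=15,
)
wire = req.to_map()
assert wire['appendMeta'] is True
assert 'shardingPolicy' in wire

restored = sls_models.UpdateMetricStoreRequest().from_map(wire)
assert restored.append_meta is True
```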