alibabacloud-sls20201230 5.3.0__py3-none-any.whl → 5.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +1138 -442
- alibabacloud_sls20201230/models.py +898 -643
- {alibabacloud_sls20201230-5.3.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/METADATA +3 -3
- alibabacloud_sls20201230-5.4.1.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.3.0.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.3.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.3.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.3.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/top_level.txt +0 -0
@@ -1344,6 +1344,148 @@ class Histogram(TeaModel):
         return self
 
 
+class IndexJsonKey(TeaModel):
+    def __init__(
+        self,
+        alias: str = None,
+        case_sensitive: bool = None,
+        chn: bool = None,
+        doc_value: bool = None,
+        token: List[str] = None,
+        type: str = None,
+    ):
+        self.alias = alias
+        self.case_sensitive = case_sensitive
+        self.chn = chn
+        self.doc_value = doc_value
+        self.token = token
+        # This parameter is required.
+        self.type = type
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.alias is not None:
+            result['alias'] = self.alias
+        if self.case_sensitive is not None:
+            result['caseSensitive'] = self.case_sensitive
+        if self.chn is not None:
+            result['chn'] = self.chn
+        if self.doc_value is not None:
+            result['doc_value'] = self.doc_value
+        if self.token is not None:
+            result['token'] = self.token
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('alias') is not None:
+            self.alias = m.get('alias')
+        if m.get('caseSensitive') is not None:
+            self.case_sensitive = m.get('caseSensitive')
+        if m.get('chn') is not None:
+            self.chn = m.get('chn')
+        if m.get('doc_value') is not None:
+            self.doc_value = m.get('doc_value')
+        if m.get('token') is not None:
+            self.token = m.get('token')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
+class IndexKey(TeaModel):
+    def __init__(
+        self,
+        alias: str = None,
+        case_sensitive: bool = None,
+        chn: bool = None,
+        doc_value: bool = None,
+        index_all: bool = None,
+        json_keys: Dict[str, IndexJsonKey] = None,
+        max_depth: int = None,
+        token: List[str] = None,
+        type: str = None,
+    ):
+        self.alias = alias
+        self.case_sensitive = case_sensitive
+        self.chn = chn
+        self.doc_value = doc_value
+        self.index_all = index_all
+        self.json_keys = json_keys
+        self.max_depth = max_depth
+        self.token = token
+        # This parameter is required.
+        self.type = type
+
+    def validate(self):
+        if self.json_keys:
+            for v in self.json_keys.values():
+                if v:
+                    v.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.alias is not None:
+            result['alias'] = self.alias
+        if self.case_sensitive is not None:
+            result['caseSensitive'] = self.case_sensitive
+        if self.chn is not None:
+            result['chn'] = self.chn
+        if self.doc_value is not None:
+            result['doc_value'] = self.doc_value
+        if self.index_all is not None:
+            result['index_all'] = self.index_all
+        result['json_keys'] = {}
+        if self.json_keys is not None:
+            for k, v in self.json_keys.items():
+                result['json_keys'][k] = v.to_map()
+        if self.max_depth is not None:
+            result['max_depth'] = self.max_depth
+        if self.token is not None:
+            result['token'] = self.token
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('alias') is not None:
+            self.alias = m.get('alias')
+        if m.get('caseSensitive') is not None:
+            self.case_sensitive = m.get('caseSensitive')
+        if m.get('chn') is not None:
+            self.chn = m.get('chn')
+        if m.get('doc_value') is not None:
+            self.doc_value = m.get('doc_value')
+        if m.get('index_all') is not None:
+            self.index_all = m.get('index_all')
+        self.json_keys = {}
+        if m.get('json_keys') is not None:
+            for k, v in m.get('json_keys').items():
+                temp_model = IndexJsonKey()
+                self.json_keys[k] = temp_model.from_map(v)
+        if m.get('max_depth') is not None:
+            self.max_depth = m.get('max_depth')
+        if m.get('token') is not None:
+            self.token = m.get('token')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
 class IngestProcessorConfiguration(TeaModel):
     def __init__(
         self,
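The hunk above introduces typed models for index fields: IndexKey for a field index entry and IndexJsonKey for sub-keys of a JSON field. A minimal sketch of how they might be used; the field names and delimiters are placeholders, not part of the diff:

```python
from alibabacloud_sls20201230 import models as sls_models

# A JSON field index whose sub-keys are typed individually.
status_key = sls_models.IndexKey(
    type='json',
    token=[',', ' ', '"'],
    doc_value=True,
    json_keys={
        # 'code' is a hypothetical sub-key of the JSON field.
        'code': sls_models.IndexJsonKey(type='long', doc_value=True),
    },
)
print(status_key.to_map())  # nested IndexJsonKey values are serialized as well
```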
@@ -3789,85 +3931,22 @@ class IndexLine(TeaModel):
         return self
 
 
-class IndexKeysValue(TeaModel):
-    def __init__(
-        self,
-        chn: bool = None,
-        case_sensitive: bool = None,
-        token: List[str] = None,
-        alias: str = None,
-        type: str = None,
-        doc_value: bool = None,
-    ):
-        self.chn = chn
-        self.case_sensitive = case_sensitive
-        self.token = token
-        self.alias = alias
-        # This parameter is required.
-        self.type = type
-        self.doc_value = doc_value
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.token is not None:
-            result['token'] = self.token
-        if self.alias is not None:
-            result['alias'] = self.alias
-        if self.type is not None:
-            result['type'] = self.type
-        if self.doc_value is not None:
-            result['doc_value'] = self.doc_value
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        if m.get('alias') is not None:
-            self.alias = m.get('alias')
-        if m.get('type') is not None:
-            self.type = m.get('type')
-        if m.get('doc_value') is not None:
-            self.doc_value = m.get('doc_value')
-        return self
-
-
 class Index(TeaModel):
     def __init__(
         self,
-        keys: Dict[str, IndexKeysValue] = None,
-        last_modify_time: int = None,
+        keys: Dict[str, IndexKey] = None,
         line: IndexLine = None,
         log_reduce: bool = None,
         log_reduce_black_list: List[str] = None,
         log_reduce_white_list: List[str] = None,
         max_text_len: int = None,
-        ttl: int = None,
     ):
         self.keys = keys
-        self.last_modify_time = last_modify_time
         self.line = line
         self.log_reduce = log_reduce
         self.log_reduce_black_list = log_reduce_black_list
         self.log_reduce_white_list = log_reduce_white_list
         self.max_text_len = max_text_len
-        # This parameter is required.
-        self.ttl = ttl
 
     def validate(self):
         if self.keys:
@@ -3887,8 +3966,6 @@ class Index(TeaModel):
         if self.keys is not None:
             for k, v in self.keys.items():
                 result['keys'][k] = v.to_map()
-        if self.last_modify_time is not None:
-            result['lastModifyTime'] = self.last_modify_time
         if self.line is not None:
             result['line'] = self.line.to_map()
         if self.log_reduce is not None:
@@ -3899,8 +3976,6 @@ class Index(TeaModel):
             result['log_reduce_white_list'] = self.log_reduce_white_list
         if self.max_text_len is not None:
             result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
         return result
 
     def from_map(self, m: dict = None):
@@ -3908,10 +3983,8 @@ class Index(TeaModel):
         self.keys = {}
         if m.get('keys') is not None:
             for k, v in m.get('keys').items():
-                temp_model = IndexKeysValue()
+                temp_model = IndexKey()
                 self.keys[k] = temp_model.from_map(v)
-        if m.get('lastModifyTime') is not None:
-            self.last_modify_time = m.get('lastModifyTime')
         if m.get('line') is not None:
             temp_model = IndexLine()
             self.line = temp_model.from_map(m['line'])
@@ -3923,8 +3996,6 @@ class Index(TeaModel):
             self.log_reduce_white_list = m.get('log_reduce_white_list')
         if m.get('max_text_len') is not None:
             self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
         return self
 
 
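Taken together, the Index hunks above switch keys to the new IndexKey model and drop the ttl and last_modify_time attributes from the model. A hedged sketch of building an Index under the 5.4.1 shape (field name and values are illustrative):

```python
from alibabacloud_sls20201230 import models as sls_models

index = sls_models.Index(
    keys={'level': sls_models.IndexKey(type='text', token=[',', ' '], doc_value=True)},
    log_reduce=False,
    max_text_len=2048,
)
# Passing ttl=... here would now raise a TypeError, because the parameter was removed in 5.4.1.
```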
@@ -4020,6 +4091,7 @@ class Logstore(TeaModel):
         logstore_name: str = None,
         max_split_shard: int = None,
         mode: str = None,
+        processor_id: str = None,
         product_type: str = None,
         shard_count: int = None,
         telemetry_type: str = None,
@@ -4037,6 +4109,7 @@ class Logstore(TeaModel):
         self.logstore_name = logstore_name
         self.max_split_shard = max_split_shard
         self.mode = mode
+        self.processor_id = processor_id
         self.product_type = product_type
         # This parameter is required.
         self.shard_count = shard_count
@@ -4076,6 +4149,8 @@ class Logstore(TeaModel):
             result['maxSplitShard'] = self.max_split_shard
         if self.mode is not None:
             result['mode'] = self.mode
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.product_type is not None:
             result['productType'] = self.product_type
         if self.shard_count is not None:
@@ -4111,6 +4186,8 @@ class Logstore(TeaModel):
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('mode') is not None:
             self.mode = m.get('mode')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('productType') is not None:
             self.product_type = m.get('productType')
         if m.get('shardCount') is not None:
@@ -4423,89 +4500,6 @@ class Shard(TeaModel):
         return self
 
 
-class KeysValue(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        type: str = None,
-        alias: str = None,
-        token: List[str] = None,
-        doc_value: bool = None,
-        vector_index: str = None,
-        embedding: str = None,
-    ):
-        # Specifies whether to enable case sensitivity. This parameter is required only when **type** is set to **text**. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. This parameter is required only when **type** is set to **text**. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.chn = chn
-        # The data type of the field value. Valid values: text, json, double, and long.
-        #
-        # This parameter is required.
-        self.type = type
-        # The alias of the field.
-        self.alias = alias
-        # The delimiters that are used to split text.
-        self.token = token
-        # Specifies whether to turn on Enable Analytics for the field.
-        self.doc_value = doc_value
-        self.vector_index = vector_index
-        self.embedding = embedding
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.type is not None:
-            result['type'] = self.type
-        if self.alias is not None:
-            result['alias'] = self.alias
-        if self.token is not None:
-            result['token'] = self.token
-        if self.doc_value is not None:
-            result['doc_value'] = self.doc_value
-        if self.vector_index is not None:
-            result['vector_index'] = self.vector_index
-        if self.embedding is not None:
-            result['embedding'] = self.embedding
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('type') is not None:
-            self.type = m.get('type')
-        if m.get('alias') is not None:
-            self.alias = m.get('alias')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        if m.get('doc_value') is not None:
-            self.doc_value = m.get('doc_value')
-        if m.get('vector_index') is not None:
-            self.vector_index = m.get('vector_index')
-        if m.get('embedding') is not None:
-            self.embedding = m.get('embedding')
-        return self
-
-
 class ApplyConfigToMachineGroupResponse(TeaModel):
     def __init__(
         self,
@@ -4796,13 +4790,22 @@ class CreateAlertRequest(TeaModel):
         name: str = None,
         schedule: Schedule = None,
     ):
+        # The detailed configurations of the alert rule.
+        #
         # This parameter is required.
         self.configuration = configuration
+        # The description of the alert rule.
         self.description = description
+        # The display name of the alert rule.
+        #
         # This parameter is required.
         self.display_name = display_name
+        # The name of the alert rule. Make sure that the name is unique in a project.
+        #
         # This parameter is required.
         self.name = name
+        # The scheduling configurations of the alert rule.
+        #
         # This parameter is required.
         self.schedule = schedule
 
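The added comments document the four required members of CreateAlertRequest. A hedged sketch of filling them in, assuming the AlertConfiguration and Schedule models keep the shapes they have elsewhere in this SDK; the names and values are placeholders:

```python
from alibabacloud_sls20201230 import models as sls_models

alert_req = sls_models.CreateAlertRequest(
    name='my-alert-rule',             # must be unique within the project
    display_name='Error spike alert',
    description='Optional free-form description.',
    configuration=sls_models.AlertConfiguration(),           # detailed rule settings go here
    schedule=sls_models.Schedule(type='FixedRate', interval='5m'),  # assumed Schedule fields
)
```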
@@ -5614,103 +5617,17 @@ class CreateETLResponse(TeaModel):
         return self
 
 
-class CreateIndexRequestLine(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        exclude_keys: List[str] = None,
-        include_keys: List[str] = None,
-        token: List[str] = None,
-    ):
-        # Specifies whether to enable case sensitivity. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.chn = chn
-        # The excluded fields. You cannot specify both include_keys and exclude_keys.
-        self.exclude_keys = exclude_keys
-        # The included fields. You cannot specify both include_keys and exclude_keys.
-        self.include_keys = include_keys
-        # The delimiters. You can specify a delimiter to delimit the content of a field value. For more information about delimiters, see Example.
-        #
-        # This parameter is required.
-        self.token = token
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.exclude_keys is not None:
-            result['exclude_keys'] = self.exclude_keys
-        if self.include_keys is not None:
-            result['include_keys'] = self.include_keys
-        if self.token is not None:
-            result['token'] = self.token
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('exclude_keys') is not None:
-            self.exclude_keys = m.get('exclude_keys')
-        if m.get('include_keys') is not None:
-            self.include_keys = m.get('include_keys')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        return self
-
-
 class CreateIndexRequest(TeaModel):
     def __init__(
         self,
-        keys: Dict[str, KeysValue] = None,
-        line: CreateIndexRequestLine = None,
-        log_reduce: bool = None,
-        log_reduce_black_list: List[str] = None,
-        log_reduce_white_list: List[str] = None,
-        max_text_len: int = None,
-        ttl: int = None,
+        body: Index = None,
     ):
-        # The
-        self.keys = keys
-        # The configuration of full-text indexes. You must specify this parameter, the keys parameter, or both parameters. For more information, see Example.
-        self.line = line
-        # Specifies whether to turn on LogReduce. After you turn on LogReduce, either the whitelist or blacklist takes effect.
-        self.log_reduce = log_reduce
-        # The fields in the blacklist that you want to use to cluster logs.
-        self.log_reduce_black_list = log_reduce_black_list
-        # The fields in the whitelist that you want to use to cluster logs.
-        self.log_reduce_white_list = log_reduce_white_list
-        # The maximum length of a field value that can be retained. Default value: 2048. Unit: bytes. The default value is equal to 2 KB. You can change the value of max_text_len. Valid values: 64 to 16384.
-        self.max_text_len = max_text_len
-        # The retention period of logs. Unit: days. Valid values: 7, 30, and 90.
-        self.ttl = ttl
+        # The request body.
+        self.body = body
 
     def validate(self):
-        if self.keys:
-            for v in self.keys.values():
-                if v:
-                    v.validate()
-        if self.line:
-            self.line.validate()
+        if self.body:
+            self.body.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -5718,44 +5635,15 @@ class CreateIndexRequest(TeaModel):
             return _map
 
         result = dict()
-        result['keys'] = {}
-        if self.keys is not None:
-            for k, v in self.keys.items():
-                result['keys'][k] = v.to_map()
-        if self.line is not None:
-            result['line'] = self.line.to_map()
-        if self.log_reduce is not None:
-            result['log_reduce'] = self.log_reduce
-        if self.log_reduce_black_list is not None:
-            result['log_reduce_black_list'] = self.log_reduce_black_list
-        if self.log_reduce_white_list is not None:
-            result['log_reduce_white_list'] = self.log_reduce_white_list
-        if self.max_text_len is not None:
-            result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
+        if self.body is not None:
+            result['body'] = self.body.to_map()
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        self.keys = {}
-        if m.get('keys') is not None:
-            for k, v in m.get('keys').items():
-                temp_model = KeysValue()
-                self.keys[k] = temp_model.from_map(v)
-        if m.get('line') is not None:
-            temp_model = CreateIndexRequestLine()
-            self.line = temp_model.from_map(m['line'])
-        if m.get('log_reduce') is not None:
-            self.log_reduce = m.get('log_reduce')
-        if m.get('log_reduce_black_list') is not None:
-            self.log_reduce_black_list = m.get('log_reduce_black_list')
-        if m.get('log_reduce_white_list') is not None:
-            self.log_reduce_white_list = m.get('log_reduce_white_list')
-        if m.get('max_text_len') is not None:
-            self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
+        if m.get('body') is not None:
+            temp_model = Index()
+            self.body = temp_model.from_map(m['body'])
         return self
 
 
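This is the breaking change of the release for index creation: CreateIndexRequest no longer carries keys, line, ttl and related fields directly and instead wraps a single Index model. A hedged migration sketch; the project and Logstore names are placeholders and the commented call site assumes the client method keeps its earlier signature:

```python
from alibabacloud_sls20201230 import models as sls_models

# 5.3.0 style (no longer available in 5.4.1):
# old_req = sls_models.CreateIndexRequest(keys={...}, line=..., ttl=30)

# 5.4.1 style: build an Index and pass it as the request body.
new_req = sls_models.CreateIndexRequest(
    body=sls_models.Index(
        keys={'level': sls_models.IndexKey(type='text', token=[',', ' '])},
        max_text_len=2048,
    ),
)
# client.create_index('my-project', 'my-logstore', new_req)  # hypothetical call site
```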
@@ -5804,31 +5692,33 @@ class CreateLogStoreRequest(TeaModel):
         logstore_name: str = None,
         max_split_shard: int = None,
         mode: str = None,
+        processor_id: str = None,
         shard_count: int = None,
         telemetry_type: str = None,
         ttl: int = None,
     ):
-        # Specifies whether to record public IP addresses. Default value: false.
+        # Specifies whether to record public IP addresses. Default value: false.
         #
         # * true
         # * false
         self.append_meta = append_meta
-        # Specifies whether to enable automatic sharding.
+        # Specifies whether to enable automatic sharding.
         #
         # * true
         # * false
         self.auto_split = auto_split
-        # Specifies whether to enable the web tracking feature. Default value: false.
+        # Specifies whether to enable the web tracking feature. Default value: false.
         #
         # * true
         # * false
         self.enable_tracking = enable_tracking
-        # The data structure of the encryption configuration.
+        # The data structure of the encryption configuration. The following parameters are included: `enable`, `encrypt_type`, and `user_cmk_info`. For more information, see [EncryptConf](https://help.aliyun.com/document_detail/409461.html).
         self.encrypt_conf = encrypt_conf
-        # The retention period of data in the hot storage tier of the Logstore.
+        # The retention period of data in the hot storage tier of the Logstore. Valid values: 7 to 3000. Unit: days.
         #
-        #
+        # After the retention period that is specified for the hot storage tier elapses, the data is moved to the Infrequent Access (IA) storage tier. For more information, see [Enable hot and cold-tiered storage for a Logstore](https://help.aliyun.com/document_detail/308645.html).
         self.hot_ttl = hot_ttl
+        # The retention period of data in the IA storage tier of the Logstore. You must set this parameter to at least 30 days. After the data retention period that you specify for the IA storage tier elapses, the data is moved to the Archive storage tier.
         self.infrequent_access_ttl = infrequent_access_ttl
         # The name of the Logstore. The name must meet the following requirements:
         #
@@ -5839,27 +5729,28 @@ class CreateLogStoreRequest(TeaModel):
         #
         # This parameter is required.
         self.logstore_name = logstore_name
-        # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to
+        # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 256.
         #
-        # >
+        # > If you set autoSplit to true, you must specify maxSplitShard.
         self.max_split_shard = max_split_shard
-        # The type of the Logstore. Log Service provides
+        # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores. Valid values:
         #
         # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
-        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a
+        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
         self.mode = mode
+        self.processor_id = processor_id
         # The number of shards.
         #
-        # >
+        # > You cannot call the CreateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
         #
         # This parameter is required.
         self.shard_count = shard_count
         # The type of the observable data. Valid values:
         #
-        # * None:
-        # * Metrics
+        # * **None** (default): log data
+        # * **Metrics**: metric data
         self.telemetry_type = telemetry_type
-        # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650,
+        # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650, logs are permanently stored.
         #
         # This parameter is required.
         self.ttl = ttl
@@ -5892,6 +5783,8 @@ class CreateLogStoreRequest(TeaModel):
             result['maxSplitShard'] = self.max_split_shard
         if self.mode is not None:
             result['mode'] = self.mode
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
         if self.telemetry_type is not None:
@@ -5921,6 +5814,8 @@ class CreateLogStoreRequest(TeaModel):
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('mode') is not None:
             self.mode = m.get('mode')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
         if m.get('telemetryType') is not None:
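Both the Logstore model and CreateLogStoreRequest gain a processor_id field that serializes as processorId, alongside the richer comments above. A sketch of a create-Logstore request using it; the processor identifier and other values are placeholders:

```python
from alibabacloud_sls20201230 import models as sls_models

logstore_req = sls_models.CreateLogStoreRequest(
    logstore_name='app-logs',
    shard_count=2,
    ttl=30,
    mode='standard',
    processor_id='my-ingest-processor',  # hypothetical ingest processor ID
)
assert logstore_req.to_map()['processorId'] == 'my-ingest-processor'
```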
@@ -6343,6 +6238,105 @@ class CreateMachineGroupResponse(TeaModel):
         return self
 
 
+class CreateMetricStoreRequest(TeaModel):
+    def __init__(
+        self,
+        auto_split: bool = None,
+        max_split_shard: int = None,
+        metric_type: str = None,
+        mode: str = None,
+        name: str = None,
+        shard_count: int = None,
+        ttl: int = None,
+    ):
+        self.auto_split = auto_split
+        self.max_split_shard = max_split_shard
+        self.metric_type = metric_type
+        self.mode = mode
+        # This parameter is required.
+        self.name = name
+        # This parameter is required.
+        self.shard_count = shard_count
+        # This parameter is required.
+        self.ttl = ttl
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.auto_split is not None:
+            result['autoSplit'] = self.auto_split
+        if self.max_split_shard is not None:
+            result['maxSplitShard'] = self.max_split_shard
+        if self.metric_type is not None:
+            result['metricType'] = self.metric_type
+        if self.mode is not None:
+            result['mode'] = self.mode
+        if self.name is not None:
+            result['name'] = self.name
+        if self.shard_count is not None:
+            result['shardCount'] = self.shard_count
+        if self.ttl is not None:
+            result['ttl'] = self.ttl
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('autoSplit') is not None:
+            self.auto_split = m.get('autoSplit')
+        if m.get('maxSplitShard') is not None:
+            self.max_split_shard = m.get('maxSplitShard')
+        if m.get('metricType') is not None:
+            self.metric_type = m.get('metricType')
+        if m.get('mode') is not None:
+            self.mode = m.get('mode')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('shardCount') is not None:
+            self.shard_count = m.get('shardCount')
+        if m.get('ttl') is not None:
+            self.ttl = m.get('ttl')
+        return self
+
+
+class CreateMetricStoreResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class CreateOSSExportRequest(TeaModel):
     def __init__(
         self,
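CreateMetricStoreRequest and CreateMetricStoreResponse are new in 5.4.1 and mirror the Logstore request models; name, shard_count, and ttl are marked as required. A usage sketch with placeholder values (the metric_type value is illustrative, and the commented call site assumes a matching client method was added):

```python
from alibabacloud_sls20201230 import models as sls_models

metric_req = sls_models.CreateMetricStoreRequest(
    name='app-metrics',
    shard_count=2,
    ttl=30,
    mode='standard',           # optional
    metric_type='prometheus',  # optional; value shown here is an assumption
)
# client.create_metric_store('my-project', metric_req)  # hypothetical call site
```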
@@ -6351,11 +6345,18 @@ class CreateOSSExportRequest(TeaModel):
         display_name: str = None,
         name: str = None,
     ):
+        # The configuration details of the job.
+        #
         # This parameter is required.
         self.configuration = configuration
+        # The description of the job.
         self.description = description
+        # The display name of the job.
+        #
         # This parameter is required.
         self.display_name = display_name
+        # The unique identifier of the OSS data shipping job.
+        #
         # This parameter is required.
         self.name = name
 
@@ -6434,11 +6435,18 @@ class CreateOSSHDFSExportRequest(TeaModel):
         display_name: str = None,
         name: str = None,
     ):
+        # The configuration details of the job.
+        #
         # This parameter is required.
         self.configuration = configuration
+        # The description of the job.
         self.description = description
+        # The display name of the job.
+        #
         # This parameter is required.
         self.display_name = display_name
+        # The unique identifier of the OSS data shipping job.
+        #
         # This parameter is required.
         self.name = name
 
@@ -6611,7 +6619,7 @@ class CreateOssExternalStoreRequestParameterColumns(TeaModel):
         #
         # This parameter is required.
         self.name = name
-        # The type of the field.
+        # The data type of the field.
         #
         # This parameter is required.
         self.type = type
@@ -6650,11 +6658,11 @@ class CreateOssExternalStoreRequestParameter(TeaModel):
         endpoint: str = None,
         objects: List[str] = None,
     ):
-        # The AccessKey ID
+        # The AccessKey ID.
         #
         # This parameter is required.
         self.accessid = accessid
-        # The AccessKey secret
+        # The AccessKey secret.
         #
         # This parameter is required.
         self.accesskey = accesskey
@@ -6662,15 +6670,15 @@ class CreateOssExternalStoreRequestParameter(TeaModel):
         #
         # This parameter is required.
         self.bucket = bucket
-        # The
+        # The associated fields.
         #
         # This parameter is required.
         self.columns = columns
-        # The
+        # The OSS endpoint. For more information, see [Regions and endpoints](https://help.aliyun.com/document_detail/31837.html).
         #
         # This parameter is required.
         self.endpoint = endpoint
-        # The
+        # The associated OSS objects. Valid values of n: 1 to 100.
         #
         # This parameter is required.
         self.objects = objects
@@ -6734,7 +6742,7 @@ class CreateOssExternalStoreRequest(TeaModel):
         #
         # This parameter is required.
         self.external_store_name = external_store_name
-        # The parameters
+        # The parameters of the external store.
         #
         # This parameter is required.
         self.parameter = parameter
@@ -7272,8 +7280,12 @@ class CreateSqlInstanceRequest(TeaModel):
         cu: int = None,
         use_as_default: bool = None,
     ):
+        # The number of compute units (CUs). When you use the Dedicated SQL feature, CUs are used in parallel.
+        #
         # This parameter is required.
         self.cu = cu
+        # Specifies whether to enable the Dedicated SQL feature for the project. If you set this parameter to true, the Dedicated SQL feature is enabled for the specified project and takes effect for all query statements that you execute in the project, including the query statements for alerts and dashboards.
+        #
         # This parameter is required.
         self.use_as_default = use_as_default
 
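The added comments explain the two required members of CreateSqlInstanceRequest. A minimal sketch; the CU count is illustrative:

```python
from alibabacloud_sls20201230 import models as sls_models

sql_req = sls_models.CreateSqlInstanceRequest(
    cu=100,               # compute units reserved for Dedicated SQL
    use_as_default=True,  # apply Dedicated SQL to all queries in the project
)
```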
@@ -7341,10 +7353,20 @@ class CreateStoreViewRequest(TeaModel):
         store_type: str = None,
         stores: List[StoreViewStore] = None,
     ):
+        # The name of the dataset.
+        #
+        # * The name can contain lowercase letters, digits, and underscores (_).
+        # * The name must start with a lowercase letter.
+        # * The name must be 3 to 62 characters in length.
+        #
         # This parameter is required.
         self.name = name
+        # The type of the dataset. Valid values: metricstore and logstore.
+        #
         # This parameter is required.
         self.store_type = store_type
+        # The Logstores or Metricstores.
+        #
         # This parameter is required.
         self.stores = stores
 
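The new comments spell out the dataset naming rules and the two valid store_type values. A hedged sketch of a Logstore-backed store view, assuming StoreViewStore exposes project and store_name members as in the current models module; project and Logstore names are placeholders:

```python
from alibabacloud_sls20201230 import models as sls_models

view_req = sls_models.CreateStoreViewRequest(
    name='ops_all_logs',   # lowercase letters, digits, underscores; 3-62 chars
    store_type='logstore',
    stores=[
        # field names assumed from the StoreViewStore model
        sls_models.StoreViewStore(project='proj-a', store_name='app-logs'),
    ],
)
```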
@@ -8112,7 +8134,7 @@ class DeleteMachineGroupResponse(TeaModel):
         return self
 
 
-class DeleteOSSExportResponse(TeaModel):
+class DeleteMetricStoreResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8145,7 +8167,7 @@ class DeleteOSSExportResponse(TeaModel):
         return self
 
 
-class DeleteOSSHDFSExportResponse(TeaModel):
+class DeleteOSSExportResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8178,7 +8200,7 @@ class DeleteOSSHDFSExportResponse(TeaModel):
         return self
 
 
-class DeleteOSSIngestionResponse(TeaModel):
+class DeleteOSSHDFSExportResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8211,7 +8233,7 @@ class DeleteOSSIngestionResponse(TeaModel):
         return self
 
 
-class DeleteProjectResponse(TeaModel):
+class DeleteOSSIngestionResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8244,7 +8266,7 @@ class DeleteProjectResponse(TeaModel):
         return self
 
 
-class DeleteProjectPolicyResponse(TeaModel):
+class DeleteProjectResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8277,7 +8299,7 @@ class DeleteProjectPolicyResponse(TeaModel):
         return self
 
 
-class DeleteSavedSearchResponse(TeaModel):
+class DeleteProjectPolicyResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8310,7 +8332,7 @@ class DeleteSavedSearchResponse(TeaModel):
         return self
 
 
-class DeleteScheduledSQLResponse(TeaModel):
+class DeleteSavedSearchResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8343,7 +8365,7 @@ class DeleteScheduledSQLResponse(TeaModel):
         return self
 
 
-class DeleteStoreViewResponse(TeaModel):
+class DeleteScheduledSQLResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8376,7 +8398,7 @@ class DeleteStoreViewResponse(TeaModel):
         return self
 
 
-class DisableAlertResponse(TeaModel):
+class DeleteStoreViewResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -8409,7 +8431,40 @@ class DisableAlertResponse(TeaModel):
         return self
 
 
-class DisableScheduledSQLResponse(TeaModel):
+class DisableAlertResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
+class DisableScheduledSQLResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -9358,11 +9413,11 @@ class GetContextLogsRequest(TeaModel):
         pack_meta: str = None,
         type: str = None,
     ):
-        # The number of logs that you want to obtain and are generated before the generation time of the start log. Valid values: (0,100]
+        # The number of logs that you want to obtain and are generated before the generation time of the start log. Valid values: `(0,100]`.
         #
         # This parameter is required.
         self.back_lines = back_lines
-        # The number of logs that you want to obtain and are generated after the generation time of the start log. Valid values: (0,100]
+        # The number of logs that you want to obtain and are generated after the generation time of the start log. Valid values: `(0,100]`.
         #
         # This parameter is required.
         self.forward_lines = forward_lines
@@ -10334,6 +10389,8 @@ class GetIndexResponseBodyLine(TeaModel):
         # The included fields.
         self.include_keys = include_keys
         # The delimiters.
+        #
+        # This parameter is required.
         self.token = token
 
     def validate(self):
@@ -10376,7 +10433,7 @@ class GetIndexResponseBody(TeaModel):
     def __init__(
         self,
         index_mode: str = None,
-        keys: Dict[str, KeysValue] = None,
+        keys: Dict[str, IndexKey] = None,
         last_modify_time: int = None,
         line: GetIndexResponseBodyLine = None,
         log_reduce: bool = None,
@@ -10405,6 +10462,8 @@ class GetIndexResponseBody(TeaModel):
         # The storage type. The value is fixed as pg.
         self.storage = storage
         # The lifecycle of the index file. Valid values: 7, 30, and 90. Unit: day.
+        #
+        # This parameter is required.
         self.ttl = ttl
 
     def validate(self):
@@ -10452,7 +10511,7 @@ class GetIndexResponseBody(TeaModel):
         self.keys = {}
         if m.get('keys') is not None:
             for k, v in m.get('keys').items():
-                temp_model = KeysValue()
+                temp_model = IndexKey()
                 self.keys[k] = temp_model.from_map(v)
         if m.get('lastModifyTime') is not None:
             self.last_modify_time = m.get('lastModifyTime')
@@ -10561,6 +10620,7 @@ class GetLogStoreMeteringModeResponseBody(TeaModel):
         self,
         metering_mode: str = None,
     ):
+        # The billing mode. Default value: ChargeByFunction. Valid values: ChargeByFunction and ChargeByDataIngest.
         self.metering_mode = metering_mode
 
     def validate(self):
@@ -10871,28 +10931,29 @@ class GetLogsV2Request(TeaModel):
         to: int = None,
         topic: str = None,
     ):
-        # Specifies whether to page forward or backward for the scan-based query or
+        # Specifies whether to page forward or backward for the scan-based query or phrase search.
         self.forward = forward
         # The beginning of the time range to query. The value is the log time that is specified when log data is written.
         #
-        # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a UNIX
+        # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a timestamp that follows the UNIX time format. It is the number of seconds that have elapsed since January 1, 1970, 00:00:00 UTC.
         #
         # This parameter is required.
         self.from_ = from_
+        # Specifies whether to highlight the returned result.
         self.highlight = highlight
-        # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement.
+        # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement. Valid values: 0 to 100. Default value: 100.
         self.line = line
         # The line from which the query starts. This parameter takes effect only when the query parameter is set to a search statement. Default value: 0.
         self.offset = offset
         # Specifies whether to enable the SQL enhancement feature. By default, the feature is disabled.
         self.power_sql = power_sql
-        # The search statement or
+        # The search statement or query statement. For more information, see the "Log search overview" and "Log analysis overview" topics.
         #
-        # If you add set session parallel_sql=true; to the analytic statement in the query parameter, Dedicated SQL is used.
+        # If you add set session parallel_sql=true; to the analytic statement in the query parameter, Dedicated SQL is used. Example: \\* | set session parallel_sql=true; select count(\\*) as pv.
         #
-        # Note: If you specify an analytic statement in the query parameter, the line and offset parameters do not take effect in this operation. In this case, we recommend that you set the line and offset parameters to 0 and use the LIMIT clause to
+        # Note: If you specify an analytic statement in the query parameter, the line and offset parameters do not take effect in this operation. In this case, we recommend that you set the line and offset parameters to 0 and use the LIMIT clause to specify the number of logs to return on each page. For more information, see the "Perform paged queries" topic.
         self.query = query
-        # Specifies whether to return logs in reverse chronological order of log timestamps. The log timestamps are accurate to
+        # Specifies whether to return logs in reverse chronological order of log timestamps. The log timestamps are accurate to minutes. Valid values:
         #
         # true: Logs are returned in reverse chronological order of log timestamps. false (default): Logs are returned in chronological order of log timestamps. Note: The reverse parameter takes effect only when the query parameter is set to a search statement. The reverse parameter specifies the method used to sort returned logs. If the query parameter is set to a query statement, the reverse parameter does not take effect. The method used to sort returned logs is specified by the ORDER BY clause in the analytic statement. If you use the keyword asc in the ORDER BY clause, the logs are sorted in chronological order. If you use the keyword desc in the ORDER BY clause, the logs are sorted in reverse chronological order. By default, asc is used in the ORDER BY clause.
         self.reverse = reverse
@@ -10900,7 +10961,7 @@ class GetLogsV2Request(TeaModel):
         self.session = session
         # The end of the time range to query. The value is the log time that is specified when log data is written.
         #
-        # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a UNIX
+        # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a timestamp that follows the UNIX time format. It is the number of seconds that have elapsed since January 1, 1970, 00:00:00 UTC.
         #
         # This parameter is required.
         self.to = to
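The expanded comments pin down the from/to semantics (a left-closed, right-open interval of UNIX seconds) and the line/offset defaults for GetLogsV2Request. A sketch of a last-15-minutes query; the query text is a placeholder:

```python
import time

from alibabacloud_sls20201230 import models as sls_models

now = int(time.time())
logs_req = sls_models.GetLogsV2Request(
    from_=now - 15 * 60,   # inclusive lower bound, UNIX seconds
    to=now,                # exclusive upper bound
    query='* | select count(*) as pv',
    line=100,
    offset=0,
)
```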
@@ -11048,7 +11109,7 @@ class GetLogsV2ResponseBodyMeta(TeaModel):
         # Indicates whether the query is an SQL query.
         self.has_sql = has_sql
         self.highlights = highlights
-        # Indicates whether the returned result is accurate.
+        # Indicates whether the returned result is accurate to seconds.
         self.is_accurate = is_accurate
         # All keys in the query result.
         self.keys = keys
@@ -11194,7 +11255,7 @@ class GetLogsV2ResponseBody(TeaModel):
     ):
         # The returned result.
         self.data = data
-        # The metadata
+        # The metadata of the returned data.
         self.meta = meta
 
     def validate(self):
@@ -11461,11 +11522,128 @@ class GetMachineGroupResponse(TeaModel):
         return self
 
 
+class GetMetricStoreResponseBody(TeaModel):
+    def __init__(
+        self,
+        auto_split: bool = None,
+        create_time: int = None,
+        last_modify_time: int = None,
+        max_split_shard: int = None,
+        metric_type: str = None,
+        mode: str = None,
+        name: str = None,
+        shard_count: int = None,
+        ttl: int = None,
+    ):
+        self.auto_split = auto_split
+        self.create_time = create_time
+        self.last_modify_time = last_modify_time
+        self.max_split_shard = max_split_shard
+        self.metric_type = metric_type
+        self.mode = mode
+        self.name = name
+        self.shard_count = shard_count
+        self.ttl = ttl
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.auto_split is not None:
+            result['autoSplit'] = self.auto_split
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.last_modify_time is not None:
+            result['lastModifyTime'] = self.last_modify_time
+        if self.max_split_shard is not None:
+            result['maxSplitShard'] = self.max_split_shard
+        if self.metric_type is not None:
+            result['metricType'] = self.metric_type
+        if self.mode is not None:
+            result['mode'] = self.mode
+        if self.name is not None:
+            result['name'] = self.name
+        if self.shard_count is not None:
+            result['shardCount'] = self.shard_count
+        if self.ttl is not None:
+            result['ttl'] = self.ttl
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('autoSplit') is not None:
+            self.auto_split = m.get('autoSplit')
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('lastModifyTime') is not None:
+            self.last_modify_time = m.get('lastModifyTime')
+        if m.get('maxSplitShard') is not None:
+            self.max_split_shard = m.get('maxSplitShard')
+        if m.get('metricType') is not None:
+            self.metric_type = m.get('metricType')
+        if m.get('mode') is not None:
+            self.mode = m.get('mode')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('shardCount') is not None:
+            self.shard_count = m.get('shardCount')
+        if m.get('ttl') is not None:
+            self.ttl = m.get('ttl')
+        return self
+
+
+class GetMetricStoreResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: GetMetricStoreResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = GetMetricStoreResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class GetMetricStoreMeteringModeResponseBody(TeaModel):
     def __init__(
         self,
         metering_mode: str = None,
     ):
+        # The billing mode. Default value: ChargeByFunction. Valid values: ChargeByFunction and ChargeByDataIngest.
         self.metering_mode = metering_mode
 
     def validate(self):
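GetMetricStoreResponseBody deserializes the Metricstore attributes presumably returned by a new GetMetricStore operation, and GetMetricStoreResponse wraps it with headers and a status code. A hedged sketch of round-tripping a payload through from_map; the payload values are invented:

```python
from alibabacloud_sls20201230 import models as sls_models

payload = {'name': 'app-metrics', 'shardCount': 2, 'ttl': 30, 'metricType': 'prometheus'}
body = sls_models.GetMetricStoreResponseBody().from_map(payload)
print(body.name, body.shard_count, body.ttl)  # attributes use snake_case on the model
```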
@@ -12040,7 +12218,27 @@ class GetStoreViewResponseBody(TeaModel):
         store_type: str = None,
         stores: List[StoreViewStore] = None,
     ):
+        # The type of the dataset.
+        #
+        # Valid values:
+        #
+        # * metricstore
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # * logstore
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # <!-- -->
         self.store_type = store_type
+        # The Logstores or Metricstores.
         self.stores = stores

     def validate(self):
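A small sketch that builds this body by hand with the documented store_type values; the values are illustrative, and the StoreViewStore fields are defined elsewhere in models.py:

from alibabacloud_sls20201230 import models as sls_models

body = sls_models.GetStoreViewResponseBody(
    store_type='metricstore',              # or 'logstore', per the valid values above
    stores=[sls_models.StoreViewStore()],  # normally filled in by from_map() on a real response
)
assert body.store_type in ('metricstore', 'logstore')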
@@ -12123,8 +12321,11 @@ class GetStoreViewIndexResponseBodyIndexes(TeaModel):
         logstore: str = None,
         project: str = None,
     ):
+        # The index configurations of the Logstore.
         self.index = index
+        # The name of the Logstore.
         self.logstore = logstore
+        # The name of the project to which the Logstore belongs.
         self.project = project

     def validate(self):
@@ -12162,6 +12363,7 @@ class GetStoreViewIndexResponseBody(TeaModel):
         self,
         indexes: List[GetStoreViewIndexResponseBodyIndexes] = None,
     ):
+        # The index configurations.
         self.indexes = indexes

     def validate(self):
@@ -12279,8 +12481,11 @@ class ListAlertsResponseBody(TeaModel):
         results: List[Alert] = None,
         total: int = None,
     ):
+        # The number of alert rules that are returned.
         self.count = count
+        # The alert rules.
         self.results = results
+        # The total number of alert rules in the project.
         self.total = total

     def validate(self):
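count/results/total support offset-based paging; a hedged sketch of rehydrating one page (the payload shape is illustrative, and a real one comes from the ListAlerts operation in client.py):

from alibabacloud_sls20201230 import models as sls_models

page = sls_models.ListAlertsResponseBody().from_map({
    'count': 1,
    'results': [{'name': 'alert-1'}],  # each entry becomes an Alert model
    'total': 1,
})
print(page.count, page.total, len(page.results))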
@@ -13219,7 +13424,7 @@ class ListConfigRequest(TeaModel):
         offset: int = None,
         size: int = None,
     ):
-        # The name of the Logtail configuration.
+        # The name of the Logtail configuration, which is used for fuzzy match.
         self.config_name = config_name
         # The name of the Logstore.
         #
@@ -14731,14 +14936,16 @@ class ListMachinesResponse(TeaModel):
         return self


-class
+class ListMetricStoresRequest(TeaModel):
     def __init__(
         self,
-
+        mode: str = None,
+        name: str = None,
         offset: int = None,
         size: int = None,
     ):
-        self.
+        self.mode = mode
+        self.name = name
         self.offset = offset
         self.size = size

@@ -14751,8 +14958,10 @@ class ListOSSExportsRequest(TeaModel):
|
|
14751
14958
|
return _map
|
14752
14959
|
|
14753
14960
|
result = dict()
|
14754
|
-
if self.
|
14755
|
-
result['
|
14961
|
+
if self.mode is not None:
|
14962
|
+
result['mode'] = self.mode
|
14963
|
+
if self.name is not None:
|
14964
|
+
result['name'] = self.name
|
14756
14965
|
if self.offset is not None:
|
14757
14966
|
result['offset'] = self.offset
|
14758
14967
|
if self.size is not None:
|
@@ -14761,8 +14970,10 @@ class ListOSSExportsRequest(TeaModel):
|
|
14761
14970
|
|
14762
14971
|
def from_map(self, m: dict = None):
|
14763
14972
|
m = m or dict()
|
14764
|
-
if m.get('
|
14765
|
-
self.
|
14973
|
+
if m.get('mode') is not None:
|
14974
|
+
self.mode = m.get('mode')
|
14975
|
+
if m.get('name') is not None:
|
14976
|
+
self.name = m.get('name')
|
14766
14977
|
if m.get('offset') is not None:
|
14767
14978
|
self.offset = m.get('offset')
|
14768
14979
|
if m.get('size') is not None:
|
@@ -14770,22 +14981,19 @@ class ListOSSExportsRequest(TeaModel):
         return self


-class
+class ListMetricStoresResponseBody(TeaModel):
     def __init__(
         self,
         count: int = None,
-
+        metricstores: List[str] = None,
         total: int = None,
     ):
         self.count = count
-        self.
+        self.metricstores = metricstores
         self.total = total

     def validate(self):
-
-            for k in self.results:
-                if k:
-                    k.validate()
+        pass

     def to_map(self):
         _map = super().to_map()
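ListMetricStoresRequest and ListMetricStoresResponseBody are new in this version; a minimal sketch of building the request and reading the paged result (values are illustrative, and the corresponding client method is not part of this models diff):

from alibabacloud_sls20201230 import models as sls_models

req = sls_models.ListMetricStoresRequest(mode='standard', name='prom', offset=0, size=100)
print(req.to_map())  # {'mode': 'standard', 'name': 'prom', 'offset': 0, 'size': 100}

# metricstores is a plain list of Metricstore names, unlike the model lists used elsewhere.
body = sls_models.ListMetricStoresResponseBody().from_map(
    {'count': 1, 'metricstores': ['prom-metrics'], 'total': 1}
)
print(body.metricstores)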
@@ -14795,10 +15003,8 @@ class ListOSSExportsResponseBody(TeaModel):
|
|
14795
15003
|
result = dict()
|
14796
15004
|
if self.count is not None:
|
14797
15005
|
result['count'] = self.count
|
14798
|
-
|
14799
|
-
|
14800
|
-
for k in self.results:
|
14801
|
-
result['results'].append(k.to_map() if k else None)
|
15006
|
+
if self.metricstores is not None:
|
15007
|
+
result['metricstores'] = self.metricstores
|
14802
15008
|
if self.total is not None:
|
14803
15009
|
result['total'] = self.total
|
14804
15010
|
return result
|
@@ -14807,22 +15013,19 @@ class ListOSSExportsResponseBody(TeaModel):
|
|
14807
15013
|
m = m or dict()
|
14808
15014
|
if m.get('count') is not None:
|
14809
15015
|
self.count = m.get('count')
|
14810
|
-
|
14811
|
-
|
14812
|
-
for k in m.get('results'):
|
14813
|
-
temp_model = OSSExport()
|
14814
|
-
self.results.append(temp_model.from_map(k))
|
15016
|
+
if m.get('metricstores') is not None:
|
15017
|
+
self.metricstores = m.get('metricstores')
|
14815
15018
|
if m.get('total') is not None:
|
14816
15019
|
self.total = m.get('total')
|
14817
15020
|
return self
|
14818
15021
|
|
14819
15022
|
|
14820
|
-
class
|
15023
|
+
class ListMetricStoresResponse(TeaModel):
|
14821
15024
|
def __init__(
|
14822
15025
|
self,
|
14823
15026
|
headers: Dict[str, str] = None,
|
14824
15027
|
status_code: int = None,
|
14825
|
-
body:
|
15028
|
+
body: ListMetricStoresResponseBody = None,
|
14826
15029
|
):
|
14827
15030
|
self.headers = headers
|
14828
15031
|
self.status_code = status_code
|
@@ -14853,12 +15056,12 @@ class ListOSSExportsResponse(TeaModel):
|
|
14853
15056
|
if m.get('statusCode') is not None:
|
14854
15057
|
self.status_code = m.get('statusCode')
|
14855
15058
|
if m.get('body') is not None:
|
14856
|
-
temp_model =
|
15059
|
+
temp_model = ListMetricStoresResponseBody()
|
14857
15060
|
self.body = temp_model.from_map(m['body'])
|
14858
15061
|
return self
|
14859
15062
|
|
14860
15063
|
|
14861
|
-
class
|
15064
|
+
class ListOSSExportsRequest(TeaModel):
|
14862
15065
|
def __init__(
|
14863
15066
|
self,
|
14864
15067
|
logstore: str = None,
|
@@ -14897,7 +15100,7 @@ class ListOSSHDFSExportsRequest(TeaModel):
|
|
14897
15100
|
return self
|
14898
15101
|
|
14899
15102
|
|
14900
|
-
class
|
15103
|
+
class ListOSSExportsResponseBody(TeaModel):
|
14901
15104
|
def __init__(
|
14902
15105
|
self,
|
14903
15106
|
count: int = None,
|
@@ -14944,12 +15147,12 @@ class ListOSSHDFSExportsResponseBody(TeaModel):
|
|
14944
15147
|
return self
|
14945
15148
|
|
14946
15149
|
|
14947
|
-
class
|
15150
|
+
class ListOSSExportsResponse(TeaModel):
|
14948
15151
|
def __init__(
|
14949
15152
|
self,
|
14950
15153
|
headers: Dict[str, str] = None,
|
14951
15154
|
status_code: int = None,
|
14952
|
-
body:
|
15155
|
+
body: ListOSSExportsResponseBody = None,
|
14953
15156
|
):
|
14954
15157
|
self.headers = headers
|
14955
15158
|
self.status_code = status_code
|
@@ -14980,12 +15183,12 @@ class ListOSSHDFSExportsResponse(TeaModel):
|
|
14980
15183
|
if m.get('statusCode') is not None:
|
14981
15184
|
self.status_code = m.get('statusCode')
|
14982
15185
|
if m.get('body') is not None:
|
14983
|
-
temp_model =
|
15186
|
+
temp_model = ListOSSExportsResponseBody()
|
14984
15187
|
self.body = temp_model.from_map(m['body'])
|
14985
15188
|
return self
|
14986
15189
|
|
14987
15190
|
|
14988
|
-
class
|
15191
|
+
class ListOSSHDFSExportsRequest(TeaModel):
|
14989
15192
|
def __init__(
|
14990
15193
|
self,
|
14991
15194
|
logstore: str = None,
|
@@ -15024,11 +15227,11 @@ class ListOSSIngestionsRequest(TeaModel):
|
|
15024
15227
|
return self
|
15025
15228
|
|
15026
15229
|
|
15027
|
-
class
|
15230
|
+
class ListOSSHDFSExportsResponseBody(TeaModel):
|
15028
15231
|
def __init__(
|
15029
15232
|
self,
|
15030
15233
|
count: int = None,
|
15031
|
-
results: List[
|
15234
|
+
results: List[OSSExport] = None,
|
15032
15235
|
total: int = None,
|
15033
15236
|
):
|
15034
15237
|
self.count = count
|
@@ -15064,19 +15267,19 @@ class ListOSSIngestionsResponseBody(TeaModel):
|
|
15064
15267
|
self.results = []
|
15065
15268
|
if m.get('results') is not None:
|
15066
15269
|
for k in m.get('results'):
|
15067
|
-
temp_model =
|
15270
|
+
temp_model = OSSExport()
|
15068
15271
|
self.results.append(temp_model.from_map(k))
|
15069
15272
|
if m.get('total') is not None:
|
15070
15273
|
self.total = m.get('total')
|
15071
15274
|
return self
|
15072
15275
|
|
15073
15276
|
|
15074
|
-
class
|
15277
|
+
class ListOSSHDFSExportsResponse(TeaModel):
|
15075
15278
|
def __init__(
|
15076
15279
|
self,
|
15077
15280
|
headers: Dict[str, str] = None,
|
15078
15281
|
status_code: int = None,
|
15079
|
-
body:
|
15282
|
+
body: ListOSSHDFSExportsResponseBody = None,
|
15080
15283
|
):
|
15081
15284
|
self.headers = headers
|
15082
15285
|
self.status_code = status_code
|
@@ -15107,27 +15310,20 @@ class ListOSSIngestionsResponse(TeaModel):
|
|
15107
15310
|
if m.get('statusCode') is not None:
|
15108
15311
|
self.status_code = m.get('statusCode')
|
15109
15312
|
if m.get('body') is not None:
|
15110
|
-
temp_model =
|
15313
|
+
temp_model = ListOSSHDFSExportsResponseBody()
|
15111
15314
|
self.body = temp_model.from_map(m['body'])
|
15112
15315
|
return self
|
15113
15316
|
|
15114
15317
|
|
15115
|
-
class
|
15318
|
+
class ListOSSIngestionsRequest(TeaModel):
|
15116
15319
|
def __init__(
|
15117
15320
|
self,
|
15118
|
-
|
15321
|
+
logstore: str = None,
|
15119
15322
|
offset: int = None,
|
15120
|
-
project_name: str = None,
|
15121
|
-
resource_group_id: str = None,
|
15122
15323
|
size: int = None,
|
15123
15324
|
):
|
15124
|
-
self.
|
15125
|
-
# The line from which the query starts. Default value: 0.
|
15325
|
+
self.logstore = logstore
|
15126
15326
|
self.offset = offset
|
15127
|
-
# The name of the project.
|
15128
|
-
self.project_name = project_name
|
15129
|
-
self.resource_group_id = resource_group_id
|
15130
|
-
# The number of entries per page. Default value: 100. This operation can return up to 500 projects.
|
15131
15327
|
self.size = size
|
15132
15328
|
|
15133
15329
|
def validate(self):
|
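ListOSSIngestionsRequest now takes logstore/offset/size, and the response body wraps OSSIngestion job models; a short sketch with illustrative values:

from alibabacloud_sls20201230 import models as sls_models

req = sls_models.ListOSSIngestionsRequest(logstore='access-log', offset=0, size=100)
print(req.to_map())  # {'logstore': 'access-log', 'offset': 0, 'size': 100}

body = sls_models.ListOSSIngestionsResponseBody().from_map(
    {'count': 1, 'results': [{'name': 'ingest-oss-1'}], 'total': 1}  # each entry becomes an OSSIngestion model
)
print(body.count, body.total)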
@@ -15139,45 +15335,182 @@ class ListProjectRequest(TeaModel):
|
|
15139
15335
|
return _map
|
15140
15336
|
|
15141
15337
|
result = dict()
|
15142
|
-
if self.
|
15143
|
-
result['
|
15338
|
+
if self.logstore is not None:
|
15339
|
+
result['logstore'] = self.logstore
|
15144
15340
|
if self.offset is not None:
|
15145
15341
|
result['offset'] = self.offset
|
15146
|
-
if self.project_name is not None:
|
15147
|
-
result['projectName'] = self.project_name
|
15148
|
-
if self.resource_group_id is not None:
|
15149
|
-
result['resourceGroupId'] = self.resource_group_id
|
15150
15342
|
if self.size is not None:
|
15151
15343
|
result['size'] = self.size
|
15152
15344
|
return result
|
15153
15345
|
|
15154
15346
|
def from_map(self, m: dict = None):
|
15155
15347
|
m = m or dict()
|
15156
|
-
if m.get('
|
15157
|
-
self.
|
15348
|
+
if m.get('logstore') is not None:
|
15349
|
+
self.logstore = m.get('logstore')
|
15158
15350
|
if m.get('offset') is not None:
|
15159
15351
|
self.offset = m.get('offset')
|
15160
|
-
if m.get('projectName') is not None:
|
15161
|
-
self.project_name = m.get('projectName')
|
15162
|
-
if m.get('resourceGroupId') is not None:
|
15163
|
-
self.resource_group_id = m.get('resourceGroupId')
|
15164
15352
|
if m.get('size') is not None:
|
15165
15353
|
self.size = m.get('size')
|
15166
15354
|
return self
|
15167
15355
|
|
15168
15356
|
|
15169
|
-
class
|
15357
|
+
class ListOSSIngestionsResponseBody(TeaModel):
|
15170
15358
|
def __init__(
|
15171
15359
|
self,
|
15172
15360
|
count: int = None,
|
15173
|
-
|
15361
|
+
results: List[OSSIngestion] = None,
|
15174
15362
|
total: int = None,
|
15175
15363
|
):
|
15176
|
-
# The number of
|
15364
|
+
# The number of OSS data import jobs that are returned.
|
15177
15365
|
self.count = count
|
15178
|
-
# The
|
15179
|
-
self.
|
15180
|
-
# The total number of
|
15366
|
+
# The OSS data import jobs.
|
15367
|
+
self.results = results
|
15368
|
+
# The total number of OSS data import jobs in the project.
|
15369
|
+
self.total = total
|
15370
|
+
|
15371
|
+
def validate(self):
|
15372
|
+
if self.results:
|
15373
|
+
for k in self.results:
|
15374
|
+
if k:
|
15375
|
+
k.validate()
|
15376
|
+
|
15377
|
+
def to_map(self):
|
15378
|
+
_map = super().to_map()
|
15379
|
+
if _map is not None:
|
15380
|
+
return _map
|
15381
|
+
|
15382
|
+
result = dict()
|
15383
|
+
if self.count is not None:
|
15384
|
+
result['count'] = self.count
|
15385
|
+
result['results'] = []
|
15386
|
+
if self.results is not None:
|
15387
|
+
for k in self.results:
|
15388
|
+
result['results'].append(k.to_map() if k else None)
|
15389
|
+
if self.total is not None:
|
15390
|
+
result['total'] = self.total
|
15391
|
+
return result
|
15392
|
+
|
15393
|
+
def from_map(self, m: dict = None):
|
15394
|
+
m = m or dict()
|
15395
|
+
if m.get('count') is not None:
|
15396
|
+
self.count = m.get('count')
|
15397
|
+
self.results = []
|
15398
|
+
if m.get('results') is not None:
|
15399
|
+
for k in m.get('results'):
|
15400
|
+
temp_model = OSSIngestion()
|
15401
|
+
self.results.append(temp_model.from_map(k))
|
15402
|
+
if m.get('total') is not None:
|
15403
|
+
self.total = m.get('total')
|
15404
|
+
return self
|
15405
|
+
|
15406
|
+
|
15407
|
+
class ListOSSIngestionsResponse(TeaModel):
|
15408
|
+
def __init__(
|
15409
|
+
self,
|
15410
|
+
headers: Dict[str, str] = None,
|
15411
|
+
status_code: int = None,
|
15412
|
+
body: ListOSSIngestionsResponseBody = None,
|
15413
|
+
):
|
15414
|
+
self.headers = headers
|
15415
|
+
self.status_code = status_code
|
15416
|
+
self.body = body
|
15417
|
+
|
15418
|
+
def validate(self):
|
15419
|
+
if self.body:
|
15420
|
+
self.body.validate()
|
15421
|
+
|
15422
|
+
def to_map(self):
|
15423
|
+
_map = super().to_map()
|
15424
|
+
if _map is not None:
|
15425
|
+
return _map
|
15426
|
+
|
15427
|
+
result = dict()
|
15428
|
+
if self.headers is not None:
|
15429
|
+
result['headers'] = self.headers
|
15430
|
+
if self.status_code is not None:
|
15431
|
+
result['statusCode'] = self.status_code
|
15432
|
+
if self.body is not None:
|
15433
|
+
result['body'] = self.body.to_map()
|
15434
|
+
return result
|
15435
|
+
|
15436
|
+
def from_map(self, m: dict = None):
|
15437
|
+
m = m or dict()
|
15438
|
+
if m.get('headers') is not None:
|
15439
|
+
self.headers = m.get('headers')
|
15440
|
+
if m.get('statusCode') is not None:
|
15441
|
+
self.status_code = m.get('statusCode')
|
15442
|
+
if m.get('body') is not None:
|
15443
|
+
temp_model = ListOSSIngestionsResponseBody()
|
15444
|
+
self.body = temp_model.from_map(m['body'])
|
15445
|
+
return self
|
15446
|
+
|
15447
|
+
|
15448
|
+
class ListProjectRequest(TeaModel):
|
15449
|
+
def __init__(
|
15450
|
+
self,
|
15451
|
+
fetch_quota: bool = None,
|
15452
|
+
offset: int = None,
|
15453
|
+
project_name: str = None,
|
15454
|
+
resource_group_id: str = None,
|
15455
|
+
size: int = None,
|
15456
|
+
):
|
15457
|
+
self.fetch_quota = fetch_quota
|
15458
|
+
# The line from which the query starts. Default value: 0.
|
15459
|
+
self.offset = offset
|
15460
|
+
# The name of the project.
|
15461
|
+
self.project_name = project_name
|
15462
|
+
self.resource_group_id = resource_group_id
|
15463
|
+
# The number of entries per page. Default value: 100. This operation can return up to 500 projects.
|
15464
|
+
self.size = size
|
15465
|
+
|
15466
|
+
def validate(self):
|
15467
|
+
pass
|
15468
|
+
|
15469
|
+
def to_map(self):
|
15470
|
+
_map = super().to_map()
|
15471
|
+
if _map is not None:
|
15472
|
+
return _map
|
15473
|
+
|
15474
|
+
result = dict()
|
15475
|
+
if self.fetch_quota is not None:
|
15476
|
+
result['fetchQuota'] = self.fetch_quota
|
15477
|
+
if self.offset is not None:
|
15478
|
+
result['offset'] = self.offset
|
15479
|
+
if self.project_name is not None:
|
15480
|
+
result['projectName'] = self.project_name
|
15481
|
+
if self.resource_group_id is not None:
|
15482
|
+
result['resourceGroupId'] = self.resource_group_id
|
15483
|
+
if self.size is not None:
|
15484
|
+
result['size'] = self.size
|
15485
|
+
return result
|
15486
|
+
|
15487
|
+
def from_map(self, m: dict = None):
|
15488
|
+
m = m or dict()
|
15489
|
+
if m.get('fetchQuota') is not None:
|
15490
|
+
self.fetch_quota = m.get('fetchQuota')
|
15491
|
+
if m.get('offset') is not None:
|
15492
|
+
self.offset = m.get('offset')
|
15493
|
+
if m.get('projectName') is not None:
|
15494
|
+
self.project_name = m.get('projectName')
|
15495
|
+
if m.get('resourceGroupId') is not None:
|
15496
|
+
self.resource_group_id = m.get('resourceGroupId')
|
15497
|
+
if m.get('size') is not None:
|
15498
|
+
self.size = m.get('size')
|
15499
|
+
return self
|
15500
|
+
|
15501
|
+
|
15502
|
+
class ListProjectResponseBody(TeaModel):
|
15503
|
+
def __init__(
|
15504
|
+
self,
|
15505
|
+
count: int = None,
|
15506
|
+
projects: List[Project] = None,
|
15507
|
+
total: int = None,
|
15508
|
+
):
|
15509
|
+
# The number of returned projects on the current page.
|
15510
|
+
self.count = count
|
15511
|
+
# The projects that meet the query conditions.
|
15512
|
+
self.projects = projects
|
15513
|
+
# The total number of projects that meet the query conditions.
|
15181
15514
|
self.total = total
|
15182
15515
|
|
15183
15516
|
def validate(self):
|
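ListProjectRequest pairs the fetchQuota switch with the usual paging fields; a minimal sketch (values are illustrative):

from alibabacloud_sls20201230 import models as sls_models

req = sls_models.ListProjectRequest(
    fetch_quota=False,          # serialized as 'fetchQuota'
    project_name='my-project',  # the name of the project
    offset=0,                   # the line from which the query starts; default 0
    size=100,                   # entries per page; up to 500 projects can be returned
)
print(req.to_map())
# The matching ListProjectResponseBody carries count, projects (List[Project]) and total.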
@@ -15390,6 +15723,7 @@ class ListScheduledSQLsRequest(TeaModel):
         offset: int = None,
         size: int = None,
     ):
+        # The name of the Logstore.
         self.logstore = logstore
         self.offset = offset
         self.size = size
@@ -15565,9 +15899,31 @@ class ListStoreViewsRequest(TeaModel):
         size: int = None,
         store_type: str = None,
     ):
+        # The dataset name that is used for fuzzy match.
         self.name = name
+        # The offset of the datasets to return. Default value: 0.
         self.offset = offset
+        # The number of datasets to return. Default value: 100.
         self.size = size
+        # The type of the datasets to return. By default, datasets are not filtered by type.
+        #
+        # Valid values:
+        #
+        # * metricstore
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # * logstore
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # <!-- -->
         self.store_type = store_type

     def validate(self):
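A short sketch of listing datasets with the documented filters, and of the count/storeviews/total shape of the result (values are illustrative):

from alibabacloud_sls20201230 import models as sls_models

req = sls_models.ListStoreViewsRequest(name='ops-', offset=0, size=100, store_type='logstore')
print(req.to_map())

body = sls_models.ListStoreViewsResponseBody(count=1, storeviews=['ops-view'], total=1)
print(body.storeviews)  # dataset names only; fetch details with GetStoreView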
@@ -15609,8 +15965,11 @@ class ListStoreViewsResponseBody(TeaModel):
         storeviews: List[str] = None,
         total: int = None,
     ):
+        # The number of returned datasets.
         self.count = count
+        # The dataset names.
         self.storeviews = storeviews
+        # The total number of datasets in the project.
         self.total = total

     def validate(self):
@@ -16331,115 +16690,6 @@ class PutWebtrackingResponse(TeaModel):
|
|
16331
16690
|
return self
|
16332
16691
|
|
16333
16692
|
|
16334
|
-
class QueryMLServiceResultsRequest(TeaModel):
|
16335
|
-
def __init__(
|
16336
|
-
self,
|
16337
|
-
allow_builtin: bool = None,
|
16338
|
-
body: MLServiceAnalysisParam = None,
|
16339
|
-
):
|
16340
|
-
self.allow_builtin = allow_builtin
|
16341
|
-
self.body = body
|
16342
|
-
|
16343
|
-
def validate(self):
|
16344
|
-
if self.body:
|
16345
|
-
self.body.validate()
|
16346
|
-
|
16347
|
-
def to_map(self):
|
16348
|
-
_map = super().to_map()
|
16349
|
-
if _map is not None:
|
16350
|
-
return _map
|
16351
|
-
|
16352
|
-
result = dict()
|
16353
|
-
if self.allow_builtin is not None:
|
16354
|
-
result['allowBuiltin'] = self.allow_builtin
|
16355
|
-
if self.body is not None:
|
16356
|
-
result['body'] = self.body.to_map()
|
16357
|
-
return result
|
16358
|
-
|
16359
|
-
def from_map(self, m: dict = None):
|
16360
|
-
m = m or dict()
|
16361
|
-
if m.get('allowBuiltin') is not None:
|
16362
|
-
self.allow_builtin = m.get('allowBuiltin')
|
16363
|
-
if m.get('body') is not None:
|
16364
|
-
temp_model = MLServiceAnalysisParam()
|
16365
|
-
self.body = temp_model.from_map(m['body'])
|
16366
|
-
return self
|
16367
|
-
|
16368
|
-
|
16369
|
-
class QueryMLServiceResultsResponseBody(TeaModel):
|
16370
|
-
def __init__(
|
16371
|
-
self,
|
16372
|
-
data: List[Dict[str, str]] = None,
|
16373
|
-
status: Dict[str, str] = None,
|
16374
|
-
):
|
16375
|
-
self.data = data
|
16376
|
-
self.status = status
|
16377
|
-
|
16378
|
-
def validate(self):
|
16379
|
-
pass
|
16380
|
-
|
16381
|
-
def to_map(self):
|
16382
|
-
_map = super().to_map()
|
16383
|
-
if _map is not None:
|
16384
|
-
return _map
|
16385
|
-
|
16386
|
-
result = dict()
|
16387
|
-
if self.data is not None:
|
16388
|
-
result['data'] = self.data
|
16389
|
-
if self.status is not None:
|
16390
|
-
result['status'] = self.status
|
16391
|
-
return result
|
16392
|
-
|
16393
|
-
def from_map(self, m: dict = None):
|
16394
|
-
m = m or dict()
|
16395
|
-
if m.get('data') is not None:
|
16396
|
-
self.data = m.get('data')
|
16397
|
-
if m.get('status') is not None:
|
16398
|
-
self.status = m.get('status')
|
16399
|
-
return self
|
16400
|
-
|
16401
|
-
|
16402
|
-
class QueryMLServiceResultsResponse(TeaModel):
|
16403
|
-
def __init__(
|
16404
|
-
self,
|
16405
|
-
headers: Dict[str, str] = None,
|
16406
|
-
status_code: int = None,
|
16407
|
-
body: QueryMLServiceResultsResponseBody = None,
|
16408
|
-
):
|
16409
|
-
self.headers = headers
|
16410
|
-
self.status_code = status_code
|
16411
|
-
self.body = body
|
16412
|
-
|
16413
|
-
def validate(self):
|
16414
|
-
if self.body:
|
16415
|
-
self.body.validate()
|
16416
|
-
|
16417
|
-
def to_map(self):
|
16418
|
-
_map = super().to_map()
|
16419
|
-
if _map is not None:
|
16420
|
-
return _map
|
16421
|
-
|
16422
|
-
result = dict()
|
16423
|
-
if self.headers is not None:
|
16424
|
-
result['headers'] = self.headers
|
16425
|
-
if self.status_code is not None:
|
16426
|
-
result['statusCode'] = self.status_code
|
16427
|
-
if self.body is not None:
|
16428
|
-
result['body'] = self.body.to_map()
|
16429
|
-
return result
|
16430
|
-
|
16431
|
-
def from_map(self, m: dict = None):
|
16432
|
-
m = m or dict()
|
16433
|
-
if m.get('headers') is not None:
|
16434
|
-
self.headers = m.get('headers')
|
16435
|
-
if m.get('statusCode') is not None:
|
16436
|
-
self.status_code = m.get('statusCode')
|
16437
|
-
if m.get('body') is not None:
|
16438
|
-
temp_model = QueryMLServiceResultsResponseBody()
|
16439
|
-
self.body = temp_model.from_map(m['body'])
|
16440
|
-
return self
|
16441
|
-
|
16442
|
-
|
16443
16693
|
class RefreshTokenRequest(TeaModel):
|
16444
16694
|
def __init__(
|
16445
16695
|
self,
|
@@ -17162,11 +17412,18 @@ class UpdateAlertRequest(TeaModel):
         display_name: str = None,
         schedule: Schedule = None,
     ):
+        # The detailed configurations of the alert rule.
+        #
         # This parameter is required.
         self.configuration = configuration
+        # The description of the alert rule.
         self.description = description
+        # The display name of the alert rule.
+        #
         # This parameter is required.
         self.display_name = display_name
+        # The scheduling settings of the alert rule.
+        #
         # This parameter is required.
         self.schedule = schedule

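A hedged sketch of building this request; configuration is assumed to be the AlertConfiguration model used by the other alert operations in models.py, and both sub-models are left mostly empty here:

from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateAlertRequest(
    configuration=sls_models.AlertConfiguration(),  # required; detailed rule settings omitted in this sketch
    description='Raise when the error count exceeds the threshold',
    display_name='error-count-alert',               # required
    schedule=sls_models.Schedule(),                 # required; interval/cron settings omitted in this sketch
)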
@@ -17679,109 +17936,17 @@ class UpdateETLResponse(TeaModel):
|
|
17679
17936
|
return self
|
17680
17937
|
|
17681
17938
|
|
17682
|
-
class UpdateIndexRequestLine(TeaModel):
|
17683
|
-
def __init__(
|
17684
|
-
self,
|
17685
|
-
case_sensitive: bool = None,
|
17686
|
-
chn: bool = None,
|
17687
|
-
exclude_keys: List[str] = None,
|
17688
|
-
include_keys: List[str] = None,
|
17689
|
-
token: List[str] = None,
|
17690
|
-
):
|
17691
|
-
# Specifies whether to enable case sensitivity. Valid values:
|
17692
|
-
#
|
17693
|
-
# * true
|
17694
|
-
# * false
|
17695
|
-
#
|
17696
|
-
# This parameter is required.
|
17697
|
-
self.case_sensitive = case_sensitive
|
17698
|
-
# Specifies whether to include Chinese characters. Valid values:
|
17699
|
-
#
|
17700
|
-
# * true
|
17701
|
-
# * false
|
17702
|
-
#
|
17703
|
-
# This parameter is required.
|
17704
|
-
self.chn = chn
|
17705
|
-
# The excluded fields. You cannot specify both include_keys and exclude_keys.
|
17706
|
-
self.exclude_keys = exclude_keys
|
17707
|
-
# The included fields. You cannot specify both include_keys and exclude_keys.
|
17708
|
-
self.include_keys = include_keys
|
17709
|
-
# The delimiters that are used to split text.
|
17710
|
-
#
|
17711
|
-
# This parameter is required.
|
17712
|
-
self.token = token
|
17713
|
-
|
17714
|
-
def validate(self):
|
17715
|
-
pass
|
17716
|
-
|
17717
|
-
def to_map(self):
|
17718
|
-
_map = super().to_map()
|
17719
|
-
if _map is not None:
|
17720
|
-
return _map
|
17721
|
-
|
17722
|
-
result = dict()
|
17723
|
-
if self.case_sensitive is not None:
|
17724
|
-
result['caseSensitive'] = self.case_sensitive
|
17725
|
-
if self.chn is not None:
|
17726
|
-
result['chn'] = self.chn
|
17727
|
-
if self.exclude_keys is not None:
|
17728
|
-
result['exclude_keys'] = self.exclude_keys
|
17729
|
-
if self.include_keys is not None:
|
17730
|
-
result['include_keys'] = self.include_keys
|
17731
|
-
if self.token is not None:
|
17732
|
-
result['token'] = self.token
|
17733
|
-
return result
|
17734
|
-
|
17735
|
-
def from_map(self, m: dict = None):
|
17736
|
-
m = m or dict()
|
17737
|
-
if m.get('caseSensitive') is not None:
|
17738
|
-
self.case_sensitive = m.get('caseSensitive')
|
17739
|
-
if m.get('chn') is not None:
|
17740
|
-
self.chn = m.get('chn')
|
17741
|
-
if m.get('exclude_keys') is not None:
|
17742
|
-
self.exclude_keys = m.get('exclude_keys')
|
17743
|
-
if m.get('include_keys') is not None:
|
17744
|
-
self.include_keys = m.get('include_keys')
|
17745
|
-
if m.get('token') is not None:
|
17746
|
-
self.token = m.get('token')
|
17747
|
-
return self
|
17748
|
-
|
17749
|
-
|
17750
17939
|
class UpdateIndexRequest(TeaModel):
|
17751
17940
|
def __init__(
|
17752
17941
|
self,
|
17753
|
-
|
17754
|
-
line: UpdateIndexRequestLine = None,
|
17755
|
-
log_reduce: bool = None,
|
17756
|
-
log_reduce_black_list: List[str] = None,
|
17757
|
-
log_reduce_white_list: List[str] = None,
|
17758
|
-
max_text_len: int = None,
|
17759
|
-
ttl: int = None,
|
17942
|
+
body: Index = None,
|
17760
17943
|
):
|
17761
|
-
# The
|
17762
|
-
self.
|
17763
|
-
# The configuration of full-text indexes.
|
17764
|
-
self.line = line
|
17765
|
-
# Specifies whether to turn on LogReduce. If you turn on LogReduce, only one of `log_reduce_white_list` and `log_reduce_black_list` takes effect.
|
17766
|
-
self.log_reduce = log_reduce
|
17767
|
-
# The fields in the blacklist that you want to use to cluster logs.
|
17768
|
-
self.log_reduce_black_list = log_reduce_black_list
|
17769
|
-
# The fields in the whitelist that you want to use to cluster logs.
|
17770
|
-
self.log_reduce_white_list = log_reduce_white_list
|
17771
|
-
# The maximum length of a field value that can be retained.
|
17772
|
-
self.max_text_len = max_text_len
|
17773
|
-
# The retention period of data. Unit: days. Valid values: 7, 30, and 90.
|
17774
|
-
#
|
17775
|
-
# This parameter is required.
|
17776
|
-
self.ttl = ttl
|
17944
|
+
# The request body.
|
17945
|
+
self.body = body
|
17777
17946
|
|
17778
17947
|
def validate(self):
|
17779
|
-
if self.
|
17780
|
-
|
17781
|
-
if v:
|
17782
|
-
v.validate()
|
17783
|
-
if self.line:
|
17784
|
-
self.line.validate()
|
17948
|
+
if self.body:
|
17949
|
+
self.body.validate()
|
17785
17950
|
|
17786
17951
|
def to_map(self):
|
17787
17952
|
_map = super().to_map()
|
@@ -17789,44 +17954,15 @@ class UpdateIndexRequest(TeaModel):
         return _map

         result = dict()
-
-
-        for k, v in self.keys.items():
-            result['keys'][k] = v.to_map()
-        if self.line is not None:
-            result['line'] = self.line.to_map()
-        if self.log_reduce is not None:
-            result['log_reduce'] = self.log_reduce
-        if self.log_reduce_black_list is not None:
-            result['log_reduce_black_list'] = self.log_reduce_black_list
-        if self.log_reduce_white_list is not None:
-            result['log_reduce_white_list'] = self.log_reduce_white_list
-        if self.max_text_len is not None:
-            result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
+        if self.body is not None:
+            result['body'] = self.body.to_map()
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-
-
-
-            temp_model = KeysValue()
-            self.keys[k] = temp_model.from_map(v)
-        if m.get('line') is not None:
-            temp_model = UpdateIndexRequestLine()
-            self.line = temp_model.from_map(m['line'])
-        if m.get('log_reduce') is not None:
-            self.log_reduce = m.get('log_reduce')
-        if m.get('log_reduce_black_list') is not None:
-            self.log_reduce_black_list = m.get('log_reduce_black_list')
-        if m.get('log_reduce_white_list') is not None:
-            self.log_reduce_white_list = m.get('log_reduce_white_list')
-        if m.get('max_text_len') is not None:
-            self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
+        if m.get('body') is not None:
+            temp_model = Index()
+            self.body = temp_model.from_map(m['body'])
         return self

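UpdateIndexRequest no longer carries keys/line/ttl itself; it now wraps the shared Index model as body. A minimal sketch, assuming Index exposes the same ttl and keys fields that the old inline request did (IndexKey appears earlier in this diff; values are illustrative):

from alibabacloud_sls20201230 import models as sls_models

index = sls_models.Index(
    ttl=30,  # assumed index retention field, in days
    keys={'status': sls_models.IndexKey(type='long', doc_value=True)},  # per-field index config
)
req = sls_models.UpdateIndexRequest(body=index)
print(req.to_map())  # the whole Index is serialized under the 'body' key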
@@ -17875,6 +18011,7 @@ class UpdateLogStoreRequest(TeaModel):
         logstore_name: str = None,
         max_split_shard: int = None,
         mode: str = None,
+        processor_id: str = None,
         shard_count: int = None,
         telemetry_type: str = None,
         ttl: int = None,
@@ -17896,32 +18033,34 @@ class UpdateLogStoreRequest(TeaModel):
         self.enable_tracking = enable_tracking
         # The data structure of the encryption configuration.
         self.encrypt_conf = encrypt_conf
-        # The retention period of data in the hot storage tier of the Logstore.
+        # The retention period of data in the hot storage tier of the Logstore. Valid values: 7 to 3000. Unit: days. After the retention period that is specified for the hot storage tier elapses, the data is moved to the Infrequent Access (IA) storage tier. For more information, see [Enable hot and cold-tiered storage for a Logstore](https://help.aliyun.com/document_detail/308645.html).
         self.hot_ttl = hot_ttl
+        # The retention period of data in the IA storage tier of the Logstore. You must set this parameter to at least 30 days. After the data retention period that you specify for the IA storage tier elapses, the data is moved to the Archive storage tier.
         self.infrequent_access_ttl = infrequent_access_ttl
         # The name of the Logstore.
         #
         # This parameter is required.
         self.logstore_name = logstore_name
-        # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to
+        # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 256.
         #
-        # >
+        # > If you set autoSplit to true, you must specify maxSplitShard.
         self.max_split_shard = max_split_shard
-        # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores.
+        # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores. Valid values:
         #
         # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
-        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the
+        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
         self.mode = mode
+        self.processor_id = processor_id
         # The number of shards.
         #
-        # >
+        # > You cannot call the UpdateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
         self.shard_count = shard_count
-        # The type of the
+        # The type of the observable data. Valid values:
         #
-        # * None:
-        # * Metrics:
+        # * None (default): log data.
+        # * Metrics: metric data.
         self.telemetry_type = telemetry_type
-        # The retention period of data. Unit:
+        # The retention period of data. Unit: days. Valid values: 1 to 3650. If you set this parameter to 3650, logs are permanently stored.
         #
         # This parameter is required.
         self.ttl = ttl
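Putting the documented ranges together, including the new processorId field; the values below are illustrative only:

from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateLogStoreRequest(
    logstore_name='access-log',     # required
    ttl=3650,                       # required; 1-3650 days, 3650 means permanent storage
    hot_ttl=30,                     # 7-3000 days before data moves to the IA tier
    infrequent_access_ttl=60,       # at least 30 days before data moves to the Archive tier
    mode='standard',                # 'standard' or 'query'
    auto_split=True,
    max_split_shard=64,             # required when autoSplit is true; 1-256
    processor_id='demo-processor',  # new in this version; serialized as 'processorId'
)
print(req.to_map()['processorId'])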
@@ -17954,6 +18093,8 @@ class UpdateLogStoreRequest(TeaModel):
             result['maxSplitShard'] = self.max_split_shard
         if self.mode is not None:
             result['mode'] = self.mode
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
         if self.telemetry_type is not None:
@@ -17983,6 +18124,8 @@ class UpdateLogStoreRequest(TeaModel):
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('mode') is not None:
             self.mode = m.get('mode')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
         if m.get('telemetryType') is not None:
@@ -18533,6 +18676,84 @@ class UpdateMachineGroupMachineResponse(TeaModel):
|
|
18533
18676
|
return self
|
18534
18677
|
|
18535
18678
|
|
18679
|
+
class UpdateMetricStoreRequest(TeaModel):
|
18680
|
+
def __init__(
|
18681
|
+
self,
|
18682
|
+
auto_split: bool = None,
|
18683
|
+
max_split_shard: int = None,
|
18684
|
+
mode: str = None,
|
18685
|
+
ttl: int = None,
|
18686
|
+
):
|
18687
|
+
self.auto_split = auto_split
|
18688
|
+
self.max_split_shard = max_split_shard
|
18689
|
+
self.mode = mode
|
18690
|
+
self.ttl = ttl
|
18691
|
+
|
18692
|
+
def validate(self):
|
18693
|
+
pass
|
18694
|
+
|
18695
|
+
def to_map(self):
|
18696
|
+
_map = super().to_map()
|
18697
|
+
if _map is not None:
|
18698
|
+
return _map
|
18699
|
+
|
18700
|
+
result = dict()
|
18701
|
+
if self.auto_split is not None:
|
18702
|
+
result['autoSplit'] = self.auto_split
|
18703
|
+
if self.max_split_shard is not None:
|
18704
|
+
result['maxSplitShard'] = self.max_split_shard
|
18705
|
+
if self.mode is not None:
|
18706
|
+
result['mode'] = self.mode
|
18707
|
+
if self.ttl is not None:
|
18708
|
+
result['ttl'] = self.ttl
|
18709
|
+
return result
|
18710
|
+
|
18711
|
+
def from_map(self, m: dict = None):
|
18712
|
+
m = m or dict()
|
18713
|
+
if m.get('autoSplit') is not None:
|
18714
|
+
self.auto_split = m.get('autoSplit')
|
18715
|
+
if m.get('maxSplitShard') is not None:
|
18716
|
+
self.max_split_shard = m.get('maxSplitShard')
|
18717
|
+
if m.get('mode') is not None:
|
18718
|
+
self.mode = m.get('mode')
|
18719
|
+
if m.get('ttl') is not None:
|
18720
|
+
self.ttl = m.get('ttl')
|
18721
|
+
return self
|
18722
|
+
|
18723
|
+
|
18724
|
+
class UpdateMetricStoreResponse(TeaModel):
|
18725
|
+
def __init__(
|
18726
|
+
self,
|
18727
|
+
headers: Dict[str, str] = None,
|
18728
|
+
status_code: int = None,
|
18729
|
+
):
|
18730
|
+
self.headers = headers
|
18731
|
+
self.status_code = status_code
|
18732
|
+
|
18733
|
+
def validate(self):
|
18734
|
+
pass
|
18735
|
+
|
18736
|
+
def to_map(self):
|
18737
|
+
_map = super().to_map()
|
18738
|
+
if _map is not None:
|
18739
|
+
return _map
|
18740
|
+
|
18741
|
+
result = dict()
|
18742
|
+
if self.headers is not None:
|
18743
|
+
result['headers'] = self.headers
|
18744
|
+
if self.status_code is not None:
|
18745
|
+
result['statusCode'] = self.status_code
|
18746
|
+
return result
|
18747
|
+
|
18748
|
+
def from_map(self, m: dict = None):
|
18749
|
+
m = m or dict()
|
18750
|
+
if m.get('headers') is not None:
|
18751
|
+
self.headers = m.get('headers')
|
18752
|
+
if m.get('statusCode') is not None:
|
18753
|
+
self.status_code = m.get('statusCode')
|
18754
|
+
return self
|
18755
|
+
|
18756
|
+
|
18536
18757
|
class UpdateMetricStoreMeteringModeRequest(TeaModel):
|
18537
18758
|
def __init__(
|
18538
18759
|
self,
|
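The UpdateMetricStore models added above take the same shard and retention knobs as the Logstore variant; a minimal sketch (the mode value is illustrative):

from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateMetricStoreRequest(
    auto_split=True,
    max_split_shard=64,
    mode='standard',  # illustrative; see the Metricstore documentation for supported modes
    ttl=30,           # retention period, in days
)
print(req.to_map())  # {'autoSplit': True, 'maxSplitShard': 64, 'mode': 'standard', 'ttl': 30}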
@@ -18601,8 +18822,11 @@ class UpdateOSSExportRequest(TeaModel):
         description: str = None,
         display_name: str = None,
     ):
+        # The configuration details of the job.
         self.configuration = configuration
+        # The description of the job.
         self.description = description
+        # The display name of the job.
         self.display_name = display_name

     def validate(self):
@@ -18675,8 +18899,11 @@ class UpdateOSSHDFSExportRequest(TeaModel):
         description: str = None,
         display_name: str = None,
     ):
+        # The configuration details of the job.
         self.configuration = configuration
+        # The description of the job.
         self.description = description
+        # The display name of the job.
         self.display_name = display_name

     def validate(self):
@@ -18750,11 +18977,17 @@ class UpdateOSSIngestionRequest(TeaModel):
         display_name: str = None,
         schedule: Schedule = None,
     ):
+        # The configurations of the OSS data import job.
+        #
         # This parameter is required.
         self.configuration = configuration
+        # The description of the OSS data import job.
         self.description = description
+        # The display name of the OSS data import job.
+        #
         # This parameter is required.
         self.display_name = display_name
+        # The scheduling type. By default, you do not need to specify this parameter. If you want to import data at regular intervals, such as importing data every Monday at 08:00, you can specify a cron expression.
         self.schedule = schedule

     def validate(self):
@@ -19533,8 +19766,30 @@ class UpdateStoreViewRequest(TeaModel):
         store_type: str = None,
         stores: List[StoreViewStore] = None,
     ):
+        # The type of the dataset.
+        #
+        # Valid values:
+        #
+        # * metricstore
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # * logstore
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
+        # <!-- -->
+        #
         # This parameter is required.
         self.store_type = store_type
+        # The Logstores or Metricstores.
+        #
         # This parameter is required.
         self.stores = stores
