alibabacloud-sls20201230 5.4.0__py3-none-any.whl → 5.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +1086 -346
- alibabacloud_sls20201230/models.py +847 -628
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.5.0.dist-info}/METADATA +5 -5
- alibabacloud_sls20201230-5.5.0.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.4.0.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.5.0.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.5.0.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.5.0.dist-info}/top_level.txt +0 -0
@@ -878,7 +878,7 @@ class ConsumeProcessor(TeaModel):
        configuration: ConsumeProcessorConfiguration = None,
        create_time: int = None,
        description: str = None,
-
+        display_name: str = None,
        processor_name: str = None,
        update_time: int = None,
    ):
@@ -887,7 +887,7 @@ class ConsumeProcessor(TeaModel):
        self.create_time = create_time
        self.description = description
        # This parameter is required.
-        self.
+        self.display_name = display_name
        # This parameter is required.
        self.processor_name = processor_name
        self.update_time = update_time
@@ -908,8 +908,8 @@ class ConsumeProcessor(TeaModel):
            result['createTime'] = self.create_time
        if self.description is not None:
            result['description'] = self.description
-        if self.
-            result['
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
        if self.processor_name is not None:
            result['processorName'] = self.processor_name
        if self.update_time is not None:
@@ -925,8 +925,8 @@ class ConsumeProcessor(TeaModel):
            self.create_time = m.get('createTime')
        if m.get('description') is not None:
            self.description = m.get('description')
-        if m.get('
-            self.
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
        if m.get('processorName') is not None:
            self.processor_name = m.get('processorName')
        if m.get('updateTime') is not None:
@@ -1344,6 +1344,148 @@ class Histogram(TeaModel):
        return self


+class IndexJsonKey(TeaModel):
+    def __init__(
+        self,
+        alias: str = None,
+        case_sensitive: bool = None,
+        chn: bool = None,
+        doc_value: bool = None,
+        token: List[str] = None,
+        type: str = None,
+    ):
+        self.alias = alias
+        self.case_sensitive = case_sensitive
+        self.chn = chn
+        self.doc_value = doc_value
+        self.token = token
+        # This parameter is required.
+        self.type = type
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.alias is not None:
+            result['alias'] = self.alias
+        if self.case_sensitive is not None:
+            result['caseSensitive'] = self.case_sensitive
+        if self.chn is not None:
+            result['chn'] = self.chn
+        if self.doc_value is not None:
+            result['doc_value'] = self.doc_value
+        if self.token is not None:
+            result['token'] = self.token
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('alias') is not None:
+            self.alias = m.get('alias')
+        if m.get('caseSensitive') is not None:
+            self.case_sensitive = m.get('caseSensitive')
+        if m.get('chn') is not None:
+            self.chn = m.get('chn')
+        if m.get('doc_value') is not None:
+            self.doc_value = m.get('doc_value')
+        if m.get('token') is not None:
+            self.token = m.get('token')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
+class IndexKey(TeaModel):
+    def __init__(
+        self,
+        alias: str = None,
+        case_sensitive: bool = None,
+        chn: bool = None,
+        doc_value: bool = None,
+        index_all: bool = None,
+        json_keys: Dict[str, IndexJsonKey] = None,
+        max_depth: int = None,
+        token: List[str] = None,
+        type: str = None,
+    ):
+        self.alias = alias
+        self.case_sensitive = case_sensitive
+        self.chn = chn
+        self.doc_value = doc_value
+        self.index_all = index_all
+        self.json_keys = json_keys
+        self.max_depth = max_depth
+        self.token = token
+        # This parameter is required.
+        self.type = type
+
+    def validate(self):
+        if self.json_keys:
+            for v in self.json_keys.values():
+                if v:
+                    v.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.alias is not None:
+            result['alias'] = self.alias
+        if self.case_sensitive is not None:
+            result['caseSensitive'] = self.case_sensitive
+        if self.chn is not None:
+            result['chn'] = self.chn
+        if self.doc_value is not None:
+            result['doc_value'] = self.doc_value
+        if self.index_all is not None:
+            result['index_all'] = self.index_all
+        result['json_keys'] = {}
+        if self.json_keys is not None:
+            for k, v in self.json_keys.items():
+                result['json_keys'][k] = v.to_map()
+        if self.max_depth is not None:
+            result['max_depth'] = self.max_depth
+        if self.token is not None:
+            result['token'] = self.token
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('alias') is not None:
+            self.alias = m.get('alias')
+        if m.get('caseSensitive') is not None:
+            self.case_sensitive = m.get('caseSensitive')
+        if m.get('chn') is not None:
+            self.chn = m.get('chn')
+        if m.get('doc_value') is not None:
+            self.doc_value = m.get('doc_value')
+        if m.get('index_all') is not None:
+            self.index_all = m.get('index_all')
+        self.json_keys = {}
+        if m.get('json_keys') is not None:
+            for k, v in m.get('json_keys').items():
+                temp_model = IndexJsonKey()
+                self.json_keys[k] = temp_model.from_map(v)
+        if m.get('max_depth') is not None:
+            self.max_depth = m.get('max_depth')
+        if m.get('token') is not None:
+            self.token = m.get('token')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
class IngestProcessorConfiguration(TeaModel):
    def __init__(
        self,
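For illustration only (not part of the published diff): a minimal sketch of how the new IndexKey and IndexJsonKey models added above might be populated, assuming they are imported from alibabacloud_sls20201230.models. The field names ('status', 'payload', 'level') and tokenizer characters are hypothetical.

    # Hypothetical usage sketch of the new index-key models.
    from alibabacloud_sls20201230 import models as sls_models

    # A numeric field with analytics (doc_value) enabled.
    status_key = sls_models.IndexKey(type='long', doc_value=True)

    # A JSON field whose sub-key 'level' gets its own text index.
    payload_key = sls_models.IndexKey(
        type='json',
        index_all=True,
        max_depth=2,
        json_keys={
            'level': sls_models.IndexJsonKey(type='text', token=[' ', ','], case_sensitive=False),
        },
    )
    keys = {'status': status_key, 'payload': payload_key}  # maps field name -> IndexKey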
@@ -1384,7 +1526,7 @@ class IngestProcessor(TeaModel):
        configuration: IngestProcessorConfiguration = None,
        create_time: int = None,
        description: str = None,
-
+        display_name: str = None,
        processor_name: str = None,
        update_time: int = None,
    ):
@@ -1393,7 +1535,7 @@ class IngestProcessor(TeaModel):
        self.create_time = create_time
        self.description = description
        # This parameter is required.
-        self.
+        self.display_name = display_name
        # This parameter is required.
        self.processor_name = processor_name
        self.update_time = update_time
@@ -1414,8 +1556,8 @@ class IngestProcessor(TeaModel):
            result['createTime'] = self.create_time
        if self.description is not None:
            result['description'] = self.description
-        if self.
-            result['
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
        if self.processor_name is not None:
            result['processorName'] = self.processor_name
        if self.update_time is not None:
@@ -1431,8 +1573,8 @@ class IngestProcessor(TeaModel):
            self.create_time = m.get('createTime')
        if m.get('description') is not None:
            self.description = m.get('description')
-        if m.get('
-            self.
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
        if m.get('processorName') is not None:
            self.processor_name = m.get('processorName')
        if m.get('updateTime') is not None:
@@ -1475,41 +1617,6 @@ class LogContent(TeaModel):
        return self


-class LogTag(TeaModel):
-    def __init__(
-        self,
-        key: str = None,
-        value: str = None,
-    ):
-        # This parameter is required.
-        self.key = key
-        # This parameter is required.
-        self.value = value
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.key is not None:
-            result['Key'] = self.key
-        if self.value is not None:
-            result['Value'] = self.value
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('Key') is not None:
-            self.key = m.get('Key')
-        if m.get('Value') is not None:
-            self.value = m.get('Value')
-        return self
-
-
class LogItem(TeaModel):
    def __init__(
        self,
@@ -1553,29 +1660,62 @@ class LogItem(TeaModel):
        return self


+class LogTag(TeaModel):
+    def __init__(
+        self,
+        key: str = None,
+        value: str = None,
+    ):
+        # This parameter is required.
+        self.key = key
+        # This parameter is required.
+        self.value = value
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.key is not None:
+            result['Key'] = self.key
+        if self.value is not None:
+            result['Value'] = self.value
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('Key') is not None:
+            self.key = m.get('Key')
+        if m.get('Value') is not None:
+            self.value = m.get('Value')
+        return self
+
+
class LogGroup(TeaModel):
    def __init__(
        self,
+        log_items: List[LogItem] = None,
        log_tags: List[LogTag] = None,
-        logs: List[LogItem] = None,
        source: str = None,
        topic: str = None,
    ):
        # This parameter is required.
+        self.log_items = log_items
        self.log_tags = log_tags
-        # This parameter is required.
-        self.logs = logs
        self.source = source
-        # This parameter is required.
        self.topic = topic

    def validate(self):
-        if self.
-            for k in self.
+        if self.log_items:
+            for k in self.log_items:
                if k:
                    k.validate()
-        if self.
-            for k in self.
+        if self.log_tags:
+            for k in self.log_tags:
                if k:
                    k.validate()

@@ -1585,14 +1725,14 @@ class LogGroup(TeaModel):
            return _map

        result = dict()
+        result['LogItems'] = []
+        if self.log_items is not None:
+            for k in self.log_items:
+                result['LogItems'].append(k.to_map() if k else None)
        result['LogTags'] = []
        if self.log_tags is not None:
            for k in self.log_tags:
                result['LogTags'].append(k.to_map() if k else None)
-        result['Logs'] = []
-        if self.logs is not None:
-            for k in self.logs:
-                result['Logs'].append(k.to_map() if k else None)
        if self.source is not None:
            result['Source'] = self.source
        if self.topic is not None:
@@ -1601,16 +1741,16 @@ class LogGroup(TeaModel):

    def from_map(self, m: dict = None):
        m = m or dict()
+        self.log_items = []
+        if m.get('LogItems') is not None:
+            for k in m.get('LogItems'):
+                temp_model = LogItem()
+                self.log_items.append(temp_model.from_map(k))
        self.log_tags = []
        if m.get('LogTags') is not None:
            for k in m.get('LogTags'):
                temp_model = LogTag()
                self.log_tags.append(temp_model.from_map(k))
-        self.logs = []
-        if m.get('Logs') is not None:
-            for k in m.get('Logs'):
-                temp_model = LogItem()
-                self.logs.append(temp_model.from_map(k))
        if m.get('Source') is not None:
            self.source = m.get('Source')
        if m.get('Topic') is not None:
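For illustration only: the hunks above rename LogGroup.logs (serialized as 'Logs') to LogGroup.log_items (serialized as 'LogItems'). A minimal sketch of building a LogGroup under the new field names follows; the LogItem and LogContent constructor arguments (time, contents, key, value) are assumed from the unchanged parts of models.py and may differ.

    # Hypothetical sketch; assumes LogItem(time=..., contents=[LogContent(key=..., value=...)]).
    from alibabacloud_sls20201230 import models as sls_models

    item = sls_models.LogItem(
        time=1700000000,
        contents=[sls_models.LogContent(key='level', value='INFO')],
    )
    group = sls_models.LogGroup(
        log_items=[item],                                      # was `logs=` in 5.4.0
        log_tags=[sls_models.LogTag(key='env', value='prod')],
        source='10.0.0.1',
        topic='access-log',
    )
    print(group.to_map())  # now serializes under 'LogItems' instead of 'Logs'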
@@ -1618,18 +1758,56 @@ class LogGroup(TeaModel):
        return self


+class LogGroupList(TeaModel):
+    def __init__(
+        self,
+        log_group_list: List[LogGroup] = None,
+    ):
+        # This parameter is required.
+        self.log_group_list = log_group_list
+
+    def validate(self):
+        if self.log_group_list:
+            for k in self.log_group_list:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['logGroupList'] = []
+        if self.log_group_list is not None:
+            for k in self.log_group_list:
+                result['logGroupList'].append(k.to_map() if k else None)
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.log_group_list = []
+        if m.get('logGroupList') is not None:
+            for k in m.get('logGroupList'):
+                temp_model = LogGroup()
+                self.log_group_list.append(temp_model.from_map(k))
+        return self
+
+
class LogtailConfigOutputDetail(TeaModel):
    def __init__(
        self,
        endpoint: str = None,
        logstore_name: str = None,
        region: str = None,
+        telemetry_type: str = None,
    ):
        # This parameter is required.
        self.endpoint = endpoint
        # This parameter is required.
        self.logstore_name = logstore_name
        self.region = region
+        self.telemetry_type = telemetry_type

    def validate(self):
        pass
@@ -1646,6 +1824,8 @@ class LogtailConfigOutputDetail(TeaModel):
            result['logstoreName'] = self.logstore_name
        if self.region is not None:
            result['region'] = self.region
+        if self.telemetry_type is not None:
+            result['telemetryType'] = self.telemetry_type
        return result

    def from_map(self, m: dict = None):
@@ -1656,6 +1836,8 @@ class LogtailConfigOutputDetail(TeaModel):
            self.logstore_name = m.get('logstoreName')
        if m.get('region') is not None:
            self.region = m.get('region')
+        if m.get('telemetryType') is not None:
+            self.telemetry_type = m.get('telemetryType')
        return self

@@ -3789,85 +3971,22 @@ class IndexLine(TeaModel):
        return self


-class
+class Index(TeaModel):
    def __init__(
        self,
-
-        case_sensitive: bool = None,
-        token: List[str] = None,
-        alias: str = None,
-        type: str = None,
-        doc_value: bool = None,
-    ):
-        self.chn = chn
-        self.case_sensitive = case_sensitive
-        self.token = token
-        self.alias = alias
-        # This parameter is required.
-        self.type = type
-        self.doc_value = doc_value
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.token is not None:
-            result['token'] = self.token
-        if self.alias is not None:
-            result['alias'] = self.alias
-        if self.type is not None:
-            result['type'] = self.type
-        if self.doc_value is not None:
-            result['doc_value'] = self.doc_value
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        if m.get('alias') is not None:
-            self.alias = m.get('alias')
-        if m.get('type') is not None:
-            self.type = m.get('type')
-        if m.get('doc_value') is not None:
-            self.doc_value = m.get('doc_value')
-        return self
-
-
-class Index(TeaModel):
-    def __init__(
-        self,
-        keys: Dict[str, IndexKeysValue] = None,
-        last_modify_time: int = None,
+        keys: Dict[str, IndexKey] = None,
        line: IndexLine = None,
        log_reduce: bool = None,
        log_reduce_black_list: List[str] = None,
        log_reduce_white_list: List[str] = None,
        max_text_len: int = None,
-        ttl: int = None,
    ):
        self.keys = keys
-        self.last_modify_time = last_modify_time
        self.line = line
        self.log_reduce = log_reduce
        self.log_reduce_black_list = log_reduce_black_list
        self.log_reduce_white_list = log_reduce_white_list
        self.max_text_len = max_text_len
-        # This parameter is required.
-        self.ttl = ttl

    def validate(self):
        if self.keys:
@@ -3887,8 +4006,6 @@ class Index(TeaModel):
        if self.keys is not None:
            for k, v in self.keys.items():
                result['keys'][k] = v.to_map()
-        if self.last_modify_time is not None:
-            result['lastModifyTime'] = self.last_modify_time
        if self.line is not None:
            result['line'] = self.line.to_map()
        if self.log_reduce is not None:
@@ -3899,8 +4016,6 @@ class Index(TeaModel):
            result['log_reduce_white_list'] = self.log_reduce_white_list
        if self.max_text_len is not None:
            result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
        return result

    def from_map(self, m: dict = None):
@@ -3908,10 +4023,8 @@ class Index(TeaModel):
        self.keys = {}
        if m.get('keys') is not None:
            for k, v in m.get('keys').items():
-                temp_model =
+                temp_model = IndexKey()
                self.keys[k] = temp_model.from_map(v)
-        if m.get('lastModifyTime') is not None:
-            self.last_modify_time = m.get('lastModifyTime')
        if m.get('line') is not None:
            temp_model = IndexLine()
            self.line = temp_model.from_map(m['line'])
@@ -3923,8 +4036,6 @@ class Index(TeaModel):
            self.log_reduce_white_list = m.get('log_reduce_white_list')
        if m.get('max_text_len') is not None:
            self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
        return self

@@ -4429,89 +4540,6 @@ class Shard(TeaModel):
        return self


-class KeysValue(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        type: str = None,
-        alias: str = None,
-        token: List[str] = None,
-        doc_value: bool = None,
-        vector_index: str = None,
-        embedding: str = None,
-    ):
-        # Specifies whether to enable case sensitivity. This parameter is required only when **type** is set to **text**. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. This parameter is required only when **type** is set to **text**. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.chn = chn
-        # The data type of the field value. Valid values: text, json, double, and long.
-        #
-        # This parameter is required.
-        self.type = type
-        # The alias of the field.
-        self.alias = alias
-        # The delimiters that are used to split text.
-        self.token = token
-        # Specifies whether to turn on Enable Analytics for the field.
-        self.doc_value = doc_value
-        self.vector_index = vector_index
-        self.embedding = embedding
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.type is not None:
-            result['type'] = self.type
-        if self.alias is not None:
-            result['alias'] = self.alias
-        if self.token is not None:
-            result['token'] = self.token
-        if self.doc_value is not None:
-            result['doc_value'] = self.doc_value
-        if self.vector_index is not None:
-            result['vector_index'] = self.vector_index
-        if self.embedding is not None:
-            result['embedding'] = self.embedding
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('type') is not None:
-            self.type = m.get('type')
-        if m.get('alias') is not None:
-            self.alias = m.get('alias')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        if m.get('doc_value') is not None:
-            self.doc_value = m.get('doc_value')
-        if m.get('vector_index') is not None:
-            self.vector_index = m.get('vector_index')
-        if m.get('embedding') is not None:
-            self.embedding = m.get('embedding')
-        return self
-
-
class ApplyConfigToMachineGroupResponse(TeaModel):
    def __init__(
        self,
@@ -5629,103 +5657,17 @@ class CreateETLResponse(TeaModel):
        return self


-class CreateIndexRequestLine(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        exclude_keys: List[str] = None,
-        include_keys: List[str] = None,
-        token: List[str] = None,
-    ):
-        # Specifies whether to enable case sensitivity. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.chn = chn
-        # The excluded fields. You cannot specify both include_keys and exclude_keys.
-        self.exclude_keys = exclude_keys
-        # The included fields. You cannot specify both include_keys and exclude_keys.
-        self.include_keys = include_keys
-        # The delimiters. You can specify a delimiter to delimit the content of a field value. For more information about delimiters, see Example.
-        #
-        # This parameter is required.
-        self.token = token
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.exclude_keys is not None:
-            result['exclude_keys'] = self.exclude_keys
-        if self.include_keys is not None:
-            result['include_keys'] = self.include_keys
-        if self.token is not None:
-            result['token'] = self.token
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('exclude_keys') is not None:
-            self.exclude_keys = m.get('exclude_keys')
-        if m.get('include_keys') is not None:
-            self.include_keys = m.get('include_keys')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        return self
-
-
class CreateIndexRequest(TeaModel):
    def __init__(
        self,
-
-        line: CreateIndexRequestLine = None,
-        log_reduce: bool = None,
-        log_reduce_black_list: List[str] = None,
-        log_reduce_white_list: List[str] = None,
-        max_text_len: int = None,
-        ttl: int = None,
+        body: Index = None,
    ):
-        # The
-        self.
-        # The configuration of full-text indexes. You must specify this parameter, the keys parameter, or both parameters. For more information, see Example.
-        self.line = line
-        # Specifies whether to turn on LogReduce. After you turn on LogReduce, either the whitelist or blacklist takes effect.
-        self.log_reduce = log_reduce
-        # The fields in the blacklist that you want to use to cluster logs.
-        self.log_reduce_black_list = log_reduce_black_list
-        # The fields in the whitelist that you want to use to cluster logs.
-        self.log_reduce_white_list = log_reduce_white_list
-        # The maximum length of a field value that can be retained. Default value: 2048. Unit: bytes. The default value is equal to 2 KB. You can change the value of max_text_len. Valid values: 64 to 16384.
-        self.max_text_len = max_text_len
-        # The retention period of logs. Unit: days. Valid values: 7, 30, and 90.
-        self.ttl = ttl
+        # The request body.
+        self.body = body

    def validate(self):
-        if self.
-
-            if v:
-                v.validate()
-        if self.line:
-            self.line.validate()
+        if self.body:
+            self.body.validate()

    def to_map(self):
        _map = super().to_map()
@@ -5733,44 +5675,15 @@ class CreateIndexRequest(TeaModel):
            return _map

        result = dict()
-
-
-            for k, v in self.keys.items():
-                result['keys'][k] = v.to_map()
-        if self.line is not None:
-            result['line'] = self.line.to_map()
-        if self.log_reduce is not None:
-            result['log_reduce'] = self.log_reduce
-        if self.log_reduce_black_list is not None:
-            result['log_reduce_black_list'] = self.log_reduce_black_list
-        if self.log_reduce_white_list is not None:
-            result['log_reduce_white_list'] = self.log_reduce_white_list
-        if self.max_text_len is not None:
-            result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
+        if self.body is not None:
+            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
-
-
-
-                temp_model = KeysValue()
-                self.keys[k] = temp_model.from_map(v)
-        if m.get('line') is not None:
-            temp_model = CreateIndexRequestLine()
-            self.line = temp_model.from_map(m['line'])
-        if m.get('log_reduce') is not None:
-            self.log_reduce = m.get('log_reduce')
-        if m.get('log_reduce_black_list') is not None:
-            self.log_reduce_black_list = m.get('log_reduce_black_list')
-        if m.get('log_reduce_white_list') is not None:
-            self.log_reduce_white_list = m.get('log_reduce_white_list')
-        if m.get('max_text_len') is not None:
-            self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
+        if m.get('body') is not None:
+            temp_model = Index()
+            self.body = temp_model.from_map(m['body'])
        return self

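For illustration only: CreateIndexRequest now wraps a single Index model in its body field instead of carrying keys/line/ttl directly. A minimal sketch follows, reusing the IndexKey shape shown earlier; the client call in the comment is an assumption about client.py, which is not part of this excerpt.

    # Hypothetical sketch of the new request shape (field values are made up).
    from alibabacloud_sls20201230 import models as sls_models

    index = sls_models.Index(
        keys={'status': sls_models.IndexKey(type='long', doc_value=True)},
        max_text_len=2048,
    )
    request = sls_models.CreateIndexRequest(body=index)
    # e.g. client.create_index('my-project', 'my-logstore', request)  # assumed client method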
@@ -5824,17 +5737,17 @@ class CreateLogStoreRequest(TeaModel):
        telemetry_type: str = None,
        ttl: int = None,
    ):
-        # Specifies whether to record public IP
+        # Specifies whether to record the **public IP address** and **log receiving time**. Default value: false. Valid values:
        #
-        # * true
-        # * false
+        # * true********\
+        # * false********\
        self.append_meta = append_meta
-        # Specifies whether to enable automatic sharding.
+        # Specifies whether to enable automatic sharding. Valid values:
        #
        # * true
        # * false
        self.auto_split = auto_split
-        # Specifies whether to enable the web tracking feature. Default value: false.
+        # Specifies whether to enable the web tracking feature. Default value: false. Valid values:
        #
        # * true
        # * false
@@ -5858,12 +5771,12 @@ class CreateLogStoreRequest(TeaModel):
        self.logstore_name = logstore_name
        # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 256.
        #
-        # > If you set autoSplit to true, you must specify
+        # > If you set autoSplit to true, you must specify this parameter.
        self.max_split_shard = max_split_shard
        # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores. Valid values:
        #
        # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
-        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a
+        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
        self.mode = mode
        self.processor_id = processor_id
        # The number of shards.
@@ -5877,7 +5790,7 @@ class CreateLogStoreRequest(TeaModel):
        # * **None** (default): log data
        # * **Metrics**: metric data
        self.telemetry_type = telemetry_type
-        # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650,
+        # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650, data is permanently stored.
        #
        # This parameter is required.
        self.ttl = ttl
@@ -6376,14 +6289,24 @@ class CreateMetricStoreRequest(TeaModel):
        shard_count: int = None,
        ttl: int = None,
    ):
+        # Specifies whether to enable automatic sharding.
        self.auto_split = auto_split
+        # The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
        self.max_split_shard = max_split_shard
+        # The type of the metric data. Example: prometheus.
        self.metric_type = metric_type
+        # The type of the Metricstore. For example, you can set the parameter to standard to query Standard Metricstores.
        self.mode = mode
+        # The name of the Metricstore.
+        #
        # This parameter is required.
        self.name = name
+        # The number of shards in the Metricstore.
+        #
        # This parameter is required.
        self.shard_count = shard_count
+        # The retention period of the metric data in the Metricstore. Unit: days.
+        #
        # This parameter is required.
        self.ttl = ttl

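For illustration only: a sketch of a CreateMetricStoreRequest populated according to the parameter descriptions added above. The values and the project name are examples, and the client call in the comment is an assumption about client.py.

    # Hypothetical sketch; values are examples, not defaults.
    from alibabacloud_sls20201230 import models as sls_models

    request = sls_models.CreateMetricStoreRequest(
        name='my-metricstore',     # required
        shard_count=2,             # required
        ttl=30,                    # required: retention period in days
        metric_type='prometheus',
        auto_split=True,
        max_split_shard=16,        # only takes effect when auto_split is True
    )
    # e.g. client.create_metric_store('my-project', request)  # assumed client method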
@@ -7232,11 +7155,13 @@ class CreateSavedSearchRequest(TeaModel):
        #
        # This parameter is required.
        self.savedsearch_name = savedsearch_name
-        # The query statement of the saved search. A query statement consists of a search statement and an analytic statement in the `Search statement|Analytic statement` format. For more information
+        # The query statement of the saved search. A query statement consists of a search statement and an analytic statement in the `Search statement|Analytic statement` format. For more information, see [Log search overview](https://help.aliyun.com/document_detail/43772.html) and [Log analysis overview](https://help.aliyun.com/document_detail/53608.html).
        #
        # This parameter is required.
        self.search_query = search_query
-        # The topic of the
+        # The topic of the logs.
+        #
+        # This parameter is required.
        self.topic = topic

    def validate(self):
@@ -7573,6 +7498,7 @@ class CreateTicketRequest(TeaModel):
        expiration_time: int = None,
    ):
        self.access_token_expiration_time = access_token_expiration_time
+        # The validity period of the ticket that is used for logon-free access. Unit: seconds. Default value: 86400. Maximum value: 2592000. The value 86400 specifies one day.
        self.expiration_time = expiration_time

    def validate(self):
@@ -7604,6 +7530,7 @@ class CreateTicketResponseBody(TeaModel):
        self,
        ticket: str = None,
    ):
+        # The ticket that is used for logon-free access.
        self.ticket = ticket

    def validate(self):
@@ -8558,11 +8485,159 @@ class DeleteStoreViewResponse(TeaModel):
        return self


-class
+class DescribeRegionsRequest(TeaModel):
    def __init__(
        self,
-
-
+        language: str = None,
+    ):
+        self.language = language
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.language is not None:
+            result['language'] = self.language
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('language') is not None:
+            self.language = m.get('language')
+        return self
+
+
+class DescribeRegionsResponseBodyRegions(TeaModel):
+    def __init__(
+        self,
+        internet_endpoint: str = None,
+        intranet_endpoint: str = None,
+        local_name: str = None,
+        region: str = None,
+    ):
+        self.internet_endpoint = internet_endpoint
+        self.intranet_endpoint = intranet_endpoint
+        self.local_name = local_name
+        self.region = region
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.internet_endpoint is not None:
+            result['internetEndpoint'] = self.internet_endpoint
+        if self.intranet_endpoint is not None:
+            result['intranetEndpoint'] = self.intranet_endpoint
+        if self.local_name is not None:
+            result['localName'] = self.local_name
+        if self.region is not None:
+            result['region'] = self.region
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('internetEndpoint') is not None:
+            self.internet_endpoint = m.get('internetEndpoint')
+        if m.get('intranetEndpoint') is not None:
+            self.intranet_endpoint = m.get('intranetEndpoint')
+        if m.get('localName') is not None:
+            self.local_name = m.get('localName')
+        if m.get('region') is not None:
+            self.region = m.get('region')
+        return self
+
+
+class DescribeRegionsResponseBody(TeaModel):
+    def __init__(
+        self,
+        regions: List[DescribeRegionsResponseBodyRegions] = None,
+    ):
+        self.regions = regions
+
+    def validate(self):
+        if self.regions:
+            for k in self.regions:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['regions'] = []
+        if self.regions is not None:
+            for k in self.regions:
+                result['regions'].append(k.to_map() if k else None)
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.regions = []
+        if m.get('regions') is not None:
+            for k in m.get('regions'):
+                temp_model = DescribeRegionsResponseBodyRegions()
+                self.regions.append(temp_model.from_map(k))
+        return self
+
+
+class DescribeRegionsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: DescribeRegionsResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = DescribeRegionsResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
+class DisableAlertResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
    ):
        self.headers = headers
        self.status_code = status_code
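For illustration only: the new DescribeRegions models added above could be used roughly as follows. The describe_regions client method name is an assumption about the accompanying client.py changes, which are not shown in this excerpt.

    # Hypothetical sketch of calling the new DescribeRegions operation.
    from alibabacloud_sls20201230 import models as sls_models

    request = sls_models.DescribeRegionsRequest(language='en')
    # resp = client.describe_regions(request)          # assumed client method
    # for r in resp.body.regions:
    #     print(r.region, r.internet_endpoint, r.intranet_endpoint)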
@@ -9538,7 +9613,6 @@ class GetContextLogsRequest(TeaModel):
        forward_lines: int = None,
        pack_id: str = None,
        pack_meta: str = None,
-        type: str = None,
    ):
        # The number of logs that you want to obtain and are generated before the generation time of the start log. Valid values: `(0,100]`.
        #
@@ -9556,10 +9630,6 @@ class GetContextLogsRequest(TeaModel):
        #
        # This parameter is required.
        self.pack_meta = pack_meta
-        # The type of the data in the Logstore. Set the value to context_log.
-        #
-        # This parameter is required.
-        self.type = type

    def validate(self):
        pass
@@ -9578,8 +9648,6 @@ class GetContextLogsRequest(TeaModel):
            result['pack_id'] = self.pack_id
        if self.pack_meta is not None:
            result['pack_meta'] = self.pack_meta
-        if self.type is not None:
-            result['type'] = self.type
        return result

    def from_map(self, m: dict = None):
@@ -9592,8 +9660,6 @@ class GetContextLogsRequest(TeaModel):
            self.pack_id = m.get('pack_id')
        if m.get('pack_meta') is not None:
            self.pack_meta = m.get('pack_meta')
-        if m.get('type') is not None:
-            self.type = m.get('type')
        return self

@@ -10516,6 +10582,8 @@ class GetIndexResponseBodyLine(TeaModel):
        # The included fields.
        self.include_keys = include_keys
        # The delimiters.
+        #
+        # This parameter is required.
        self.token = token

    def validate(self):
@@ -10558,7 +10626,7 @@ class GetIndexResponseBody(TeaModel):
    def __init__(
        self,
        index_mode: str = None,
-        keys: Dict[str,
+        keys: Dict[str, IndexKey] = None,
        last_modify_time: int = None,
        line: GetIndexResponseBodyLine = None,
        log_reduce: bool = None,
@@ -10587,6 +10655,8 @@ class GetIndexResponseBody(TeaModel):
        # The storage type. The value is fixed as pg.
        self.storage = storage
        # The lifecycle of the index file. Valid values: 7, 30, and 90. Unit: day.
+        #
+        # This parameter is required.
        self.ttl = ttl

    def validate(self):
@@ -10634,7 +10704,7 @@ class GetIndexResponseBody(TeaModel):
        self.keys = {}
        if m.get('keys') is not None:
            for k, v in m.get('keys').items():
-                temp_model =
+                temp_model = IndexKey()
                self.keys[k] = temp_model.from_map(v)
        if m.get('lastModifyTime') is not None:
            self.last_modify_time = m.get('lastModifyTime')
@@ -10874,9 +10944,9 @@ class GetLogsRequest(TeaModel):
        #
        # This parameter is required.
        self.from_ = from_
-        # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement. Minimum value: 0. Maximum value: 100. Default value: 100.
+        # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement. Minimum value: 0. Maximum value: 100. Default value: 100. For more information, see [Perform paged queries](https://help.aliyun.com/document_detail/89994.html).
        self.line = line
-        # The line from which the query starts. This parameter takes effect only when the query parameter is set to a search statement. Default value: 0.
+        # The line from which the query starts. This parameter takes effect only when the query parameter is set to a search statement. Default value: 0. For more information, see [Perform paged queries](https://help.aliyun.com/document_detail/89994.html).
        self.offset = offset
        # Specifies whether to enable the Dedicated SQL feature. For more information, see [Enable Dedicated SQL](https://help.aliyun.com/document_detail/223777.html). Valid values:
        #
@@ -10913,7 +10983,7 @@ class GetLogsRequest(TeaModel):
        #
        # This parameter is required.
        self.to = to
-        # The topic of the logs. The default value is
+        # The topic of the logs. The default value is an empty string. For more information, see [Topic](https://help.aliyun.com/document_detail/48881.html).
        self.topic = topic

    def validate(self):
@@ -11010,7 +11080,10 @@ class GetLogsV2Headers(TeaModel):
        accept_encoding: str = None,
    ):
        self.common_headers = common_headers
-        # The compression
+        # The compression format.
+        #
+        # * For Java, Python, and Go, only the lz4 and gzip algorithms are supported for decompression.
+        # * For PHP, JavaScript, and C#, only the gzip algorithm is supported for decompression.
        #
        # This parameter is required.
        self.accept_encoding = accept_encoding
@@ -11658,14 +11731,23 @@ class GetMetricStoreResponseBody(TeaModel):
        shard_count: int = None,
        ttl: int = None,
    ):
+        # Indicates whether the automatic sharding feature is enabled.
        self.auto_split = auto_split
+        # The creation time. The value is a UNIX timestamp.
        self.create_time = create_time
+        # The last update time. The value is a UNIX timestamp.
        self.last_modify_time = last_modify_time
+        # The maximum number of shards into which existing shards can be automatically split.
        self.max_split_shard = max_split_shard
+        # The metric type of the Metricstore. Example: prometheus.
        self.metric_type = metric_type
+        # The specification type of the Metricstore. Example: standard.
        self.mode = mode
+        # The name of the Metricstore.
        self.name = name
+        # The number of shards.
        self.shard_count = shard_count
+        # The retention period. Unit: days.
        self.ttl = ttl

    def validate(self):
@@ -13119,6 +13201,7 @@ class ListCollectionPoliciesResponseBodyDataCentralizeConfig(TeaModel):
        self.dest_logstore = dest_logstore
        self.dest_project = dest_project
        self.dest_region = dest_region
+        # The data retention period for centralized storage. Unit: days.
        self.dest_ttl = dest_ttl

    def validate(self):
@@ -13279,6 +13362,7 @@ class ListCollectionPoliciesResponseBodyData(TeaModel):
        product_code: str = None,
        resource_directory: ListCollectionPoliciesResponseBodyDataResourceDirectory = None,
    ):
+        # The configuration for centralized storage.
        self.centralize_config = centralize_config
        self.centralize_enabled = centralize_enabled
        self.data_code = data_code
@@ -13445,6 +13529,7 @@ class ListCollectionPoliciesResponseBody(TeaModel):
        total_count: int = None,
    ):
        self.current_count = current_count
+        # The data of the policies that are matched against the query conditions. The data is returned based on paginated results.
        self.data = data
        self.statistics = statistics
        self.total_count = total_count
@@ -15067,9 +15152,13 @@ class ListMetricStoresRequest(TeaModel):
        offset: int = None,
        size: int = None,
    ):
+        # The type of the Metricstore. For example, you can set the parameter to standard to query Standard Metricstores.
        self.mode = mode
+        # The name of the Metricstore. Fuzzy search is supported. If you do not specify this parameter, all Metricstores are involved.
        self.name = name
+        # The start position of the query.
        self.offset = offset
+        # The number of entries per page.
        self.size = size

    def validate(self):
@@ -15111,8 +15200,11 @@ class ListMetricStoresResponseBody(TeaModel):
        metricstores: List[str] = None,
        total: int = None,
    ):
+        # The total number of entries returned.
        self.count = count
+        # The names of the Metricstores.
        self.metricstores = metricstores
+        # The total number of queried Metricstores.
        self.total = total

    def validate(self):
@@ -16526,6 +16618,128 @@ class OpenSlsServiceResponse(TeaModel):
        return self


+class PullLogsHeaders(TeaModel):
+    def __init__(
+        self,
+        common_headers: Dict[str, str] = None,
+        accept_encoding: str = None,
+    ):
+        self.common_headers = common_headers
+        self.accept_encoding = accept_encoding
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.common_headers is not None:
+            result['commonHeaders'] = self.common_headers
+        if self.accept_encoding is not None:
+            result['Accept-Encoding'] = self.accept_encoding
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('commonHeaders') is not None:
+            self.common_headers = m.get('commonHeaders')
+        if m.get('Accept-Encoding') is not None:
+            self.accept_encoding = m.get('Accept-Encoding')
+        return self
+
+
+class PullLogsRequest(TeaModel):
+    def __init__(
+        self,
+        count: int = None,
+        cursor: str = None,
+        end_cursor: str = None,
+        query: str = None,
+    ):
+        # This parameter is required.
+        self.count = count
+        # This parameter is required.
+        self.cursor = cursor
+        self.end_cursor = end_cursor
+        # The SPL statement that is used to filter data. For more information, see [SPL instructions](https://help.aliyun.com/document_detail/2536530.html).
+        self.query = query
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.count is not None:
+            result['count'] = self.count
+        if self.cursor is not None:
+            result['cursor'] = self.cursor
+        if self.end_cursor is not None:
+            result['end_cursor'] = self.end_cursor
+        if self.query is not None:
+            result['query'] = self.query
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('count') is not None:
+            self.count = m.get('count')
+        if m.get('cursor') is not None:
+            self.cursor = m.get('cursor')
+        if m.get('end_cursor') is not None:
+            self.end_cursor = m.get('end_cursor')
+        if m.get('query') is not None:
+            self.query = m.get('query')
+        return self
+
+
+class PullLogsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: LogGroupList = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = LogGroupList()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
class PutAnnotationDataRequest(TeaModel):
    def __init__(
        self,
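For illustration only: a sketch of the new PullLogs models. The cursor value, shard id, and the pull_logs_with_options client method are assumptions about client.py, and the Accept-Encoding value follows the compression note documented for GetLogsV2Headers earlier in this diff.

    # Hypothetical sketch of pulling a batch of log groups from one shard.
    from alibabacloud_sls20201230 import models as sls_models

    headers = sls_models.PullLogsHeaders(accept_encoding='lz4')
    request = sls_models.PullLogsRequest(
        count=100,                      # required: maximum number of log groups to pull
        cursor='MTcwMDAwMDAwMA==',      # required: start cursor (example value)
        query='* | where cast(status as bigint) >= 500',  # optional SPL filter
    )
    # resp = client.pull_logs_with_options('my-project', 'my-logstore', '0', request, headers, runtime)  # assumed
    # log_groups = resp.body.log_group_list   # resp.body is the new LogGroupList model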
@@ -16603,17 +16817,54 @@ class PutAnnotationDataResponse(TeaModel):
        return self


-class
+class PutLogsHeaders(TeaModel):
    def __init__(
        self,
-
+        common_headers: Dict[str, str] = None,
+        x_log_compresstype: str = None,
    ):
-
-
+        self.common_headers = common_headers
+        # The compression format. lz4 and gzip are supported.
+        #
+        # This parameter is required.
+        self.x_log_compresstype = x_log_compresstype

    def validate(self):
        pass

+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.common_headers is not None:
+            result['commonHeaders'] = self.common_headers
+        if self.x_log_compresstype is not None:
+            result['x-log-compresstype'] = self.x_log_compresstype
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('commonHeaders') is not None:
+            self.common_headers = m.get('commonHeaders')
+        if m.get('x-log-compresstype') is not None:
+            self.x_log_compresstype = m.get('x-log-compresstype')
+        return self
+
+
+class PutLogsRequest(TeaModel):
+    def __init__(
+        self,
+        body: LogGroup = None,
+    ):
+        # The compressed Protobuf data.
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
@@ -16621,17 +16872,18 @@ class PutProjectPolicyRequest(TeaModel):

         result = dict()
         if self.body is not None:
-            result['body'] = self.body
+            result['body'] = self.body.to_map()
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
         if m.get('body') is not None:
-
+            temp_model = LogGroup()
+            self.body = temp_model.from_map(m['body'])
         return self


-class
+class PutLogsResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
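Together, PutLogsHeaders and PutLogsRequest model log ingestion: the required x-log-compresstype header names the compression format (lz4 or gzip), and the request body is a LogGroup that is serialized via to_map()/from_map() like any other nested model. A hedged sketch; LogGroup's own fields are not shown in this diff, and the put_logs_with_options call is the conventional generated-client shape rather than something confirmed here.

```python
from alibabacloud_sls20201230 import models as sls_models

log_group = sls_models.LogGroup()   # populate logs/topic/source; fields not shown in this diff
headers = sls_models.PutLogsHeaders(
    common_headers={},
    x_log_compresstype='lz4',       # required; lz4 and gzip are supported
)
request = sls_models.PutLogsRequest(body=log_group)
# Assumed client call (not shown in this diff):
# client.put_logs_with_options('my-project', 'my-logstore', request, headers, runtime)
```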
@@ -16664,13 +16916,13 @@ class PutProjectPolicyResponse(TeaModel):
         return self


-class
+class PutProjectPolicyRequest(TeaModel):
     def __init__(
         self,
-
+        body: str = None,
     ):
-        #
-        self.
+        # The project policy.
+        self.body = body

     def validate(self):
         pass
@@ -16681,18 +16933,18 @@ class PutProjectTransferAccelerationRequest(TeaModel):
             return _map

         result = dict()
-        if self.
-            result['
+        if self.body is not None:
+            result['body'] = self.body
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-            self.
+        if m.get('body') is not None:
+            self.body = m.get('body')
         return self


-class
+class PutProjectPolicyResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
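PutProjectPolicyRequest now carries the project policy document as a plain string body, serialized as-is rather than through a nested model's to_map(). A sketch with a hypothetical policy document; the policy grammar itself is defined by Simple Log Service, not by this diff.

```python
import json
from alibabacloud_sls20201230 import models as sls_models

# Hypothetical policy document, for illustration only.
policy = {
    'Version': '1',
    'Statement': [{
        'Effect': 'Deny',
        'Action': ['log:PostLogStoreLogs'],
        'Resource': 'acs:log:*:*:project/my-project/*',
        'Principal': '*',
    }],
}
request = sls_models.PutProjectPolicyRequest(body=json.dumps(policy))
print(request.to_map())   # {'body': '...'}
```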
@@ -16725,28 +16977,13 @@ class PutProjectTransferAccelerationResponse(TeaModel):
         return self


-class
+class PutProjectTransferAccelerationRequest(TeaModel):
     def __init__(
         self,
-
-        source: str = None,
-        tags: Dict[str, str] = None,
-        topic: str = None,
+        enabled: bool = None,
     ):
-        # The logs. Each element is a JSON object that indicates a log.
-        #
-        # > **Note**: The time in a log that is collected by using the web tracking feature is the time at which Simple Log Service receives the log. You do not need to configure the __time__ field for each log. If this field exists, it is overwritten by the time at which Simple Log Service receives the log.
-        #
-        # This parameter is required.
-        self.logs = logs
-        # The source of the logs.
-        #
         # This parameter is required.
-        self.
-        # The tags of the logs.
-        self.tags = tags
-        # The topic of the logs.
-        self.topic = topic
+        self.enabled = enabled

     def validate(self):
         pass
@@ -16757,30 +16994,18 @@ class PutWebtrackingRequest(TeaModel):
             return _map

         result = dict()
-        if self.
-            result['
-        if self.source is not None:
-            result['__source__'] = self.source
-        if self.tags is not None:
-            result['__tags__'] = self.tags
-        if self.topic is not None:
-            result['__topic__'] = self.topic
+        if self.enabled is not None:
+            result['enabled'] = self.enabled
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-            self.
-        if m.get('__source__') is not None:
-            self.source = m.get('__source__')
-        if m.get('__tags__') is not None:
-            self.tags = m.get('__tags__')
-        if m.get('__topic__') is not None:
-            self.topic = m.get('__topic__')
+        if m.get('enabled') is not None:
+            self.enabled = m.get('enabled')
         return self


-class
+class PutProjectTransferAccelerationResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
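PutProjectTransferAccelerationRequest is reduced to a single required boolean; the logs/source/tags/topic removals in the two hunks above belong to the class that previously occupied this position in the file (PutWebtrackingRequest, which reappears further down). A minimal usage sketch:

```python
from alibabacloud_sls20201230 import models as sls_models

request = sls_models.PutProjectTransferAccelerationRequest(enabled=True)
print(request.to_map())   # {'enabled': True}
```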
@@ -16813,49 +17038,28 @@ class PutWebtrackingResponse(TeaModel):
         return self


-class
-    def __init__(
-        self,
-        allow_builtin: bool = None,
-        body: MLServiceAnalysisParam = None,
-    ):
-        self.allow_builtin = allow_builtin
-        self.body = body
-
-    def validate(self):
-        if self.body:
-            self.body.validate()
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.allow_builtin is not None:
-            result['allowBuiltin'] = self.allow_builtin
-        if self.body is not None:
-            result['body'] = self.body.to_map()
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('allowBuiltin') is not None:
-            self.allow_builtin = m.get('allowBuiltin')
-        if m.get('body') is not None:
-            temp_model = MLServiceAnalysisParam()
-            self.body = temp_model.from_map(m['body'])
-        return self
-
-
-class QueryMLServiceResultsResponseBody(TeaModel):
+class PutWebtrackingRequest(TeaModel):
     def __init__(
         self,
-
-
+        logs: List[Dict[str, str]] = None,
+        source: str = None,
+        tags: Dict[str, str] = None,
+        topic: str = None,
     ):
-
-
+        # The logs. Each element is a JSON object that indicates a log.
+        #
+        # > **Note**: The time in a log that is collected by using the web tracking feature is the time at which Simple Log Service receives the log. You do not need to configure the __time__ field for each log. If this field exists, it is overwritten by the time at which Simple Log Service receives the log.
+        #
+        # This parameter is required.
+        self.logs = logs
+        # The source of the logs.
+        #
+        # This parameter is required.
+        self.source = source
+        # The tags of the logs.
+        self.tags = tags
+        # The topic of the logs.
+        self.topic = topic

     def validate(self):
         pass
@@ -16866,35 +17070,40 @@ class QueryMLServiceResultsResponseBody(TeaModel):
             return _map

         result = dict()
-        if self.
-            result['
-        if self.
-            result['
+        if self.logs is not None:
+            result['__logs__'] = self.logs
+        if self.source is not None:
+            result['__source__'] = self.source
+        if self.tags is not None:
+            result['__tags__'] = self.tags
+        if self.topic is not None:
+            result['__topic__'] = self.topic
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-            self.
-        if m.get('
-            self.
+        if m.get('__logs__') is not None:
+            self.logs = m.get('__logs__')
+        if m.get('__source__') is not None:
+            self.source = m.get('__source__')
+        if m.get('__tags__') is not None:
+            self.tags = m.get('__tags__')
+        if m.get('__topic__') is not None:
+            self.topic = m.get('__topic__')
         return self


-class
+class PutWebtrackingResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
         status_code: int = None,
-        body: QueryMLServiceResultsResponseBody = None,
     ):
         self.headers = headers
         self.status_code = status_code
-        self.body = body

     def validate(self):
-
-        self.body.validate()
+        pass

     def to_map(self):
         _map = super().to_map()
@@ -16906,8 +17115,6 @@ class QueryMLServiceResultsResponse(TeaModel):
             result['headers'] = self.headers
         if self.status_code is not None:
             result['statusCode'] = self.status_code
-        if self.body is not None:
-            result['body'] = self.body.to_map()
         return result

     def from_map(self, m: dict = None):
@@ -16916,9 +17123,6 @@ class QueryMLServiceResultsResponse(TeaModel):
             self.headers = m.get('headers')
         if m.get('statusCode') is not None:
             self.status_code = m.get('statusCode')
-        if m.get('body') is not None:
-            temp_model = QueryMLServiceResultsResponseBody()
-            self.body = temp_model.from_map(m['body'])
         return self


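With the hunks above, PutWebtrackingRequest regains its logs/source/tags/topic fields at the new file position and serializes them under the __logs__/__source__/__tags__/__topic__ keys, while PutWebtrackingResponse loses its body. A usage sketch grounded in the fields shown:

```python
from alibabacloud_sls20201230 import models as sls_models

request = sls_models.PutWebtrackingRequest(
    logs=[{'event': 'click', 'page': '/index'}],  # required; one dict per log
    source='web-frontend',                        # required
    tags={'app': 'demo'},
    topic='user-behavior',
)
print(request.to_map())
# {'__logs__': [...], '__source__': 'web-frontend', '__tags__': {...}, '__topic__': 'user-behavior'}
```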
@@ -16929,6 +17133,7 @@ class RefreshTokenRequest(TeaModel):
         ticket: str = None,
     ):
         self.access_token_expiration_time = access_token_expiration_time
+        # The ticket that is used for logon-free access.
         self.ticket = ticket

     def validate(self):
@@ -17998,7 +18203,7 @@ class UpdateDashboardRequest(TeaModel):
         description: str = None,
         display_name: str = None,
     ):
-        # The
+        # The attribute values of the dashboard.
         self.attribute = attribute
         # The charts on the dashboard.
         #
@@ -18168,109 +18373,17 @@ class UpdateETLResponse(TeaModel):
         return self


-class UpdateIndexRequestLine(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        exclude_keys: List[str] = None,
-        include_keys: List[str] = None,
-        token: List[str] = None,
-    ):
-        # Specifies whether to enable case sensitivity. Valid values:
-        #
-        # * true
-        # * false
-        #
-        # This parameter is required.
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. Valid values:
-        #
-        # * true
-        # * false
-        #
-        # This parameter is required.
-        self.chn = chn
-        # The excluded fields. You cannot specify both include_keys and exclude_keys.
-        self.exclude_keys = exclude_keys
-        # The included fields. You cannot specify both include_keys and exclude_keys.
-        self.include_keys = include_keys
-        # The delimiters that are used to split text.
-        #
-        # This parameter is required.
-        self.token = token
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.exclude_keys is not None:
-            result['exclude_keys'] = self.exclude_keys
-        if self.include_keys is not None:
-            result['include_keys'] = self.include_keys
-        if self.token is not None:
-            result['token'] = self.token
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('exclude_keys') is not None:
-            self.exclude_keys = m.get('exclude_keys')
-        if m.get('include_keys') is not None:
-            self.include_keys = m.get('include_keys')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        return self
-
-
 class UpdateIndexRequest(TeaModel):
     def __init__(
         self,
-
-        line: UpdateIndexRequestLine = None,
-        log_reduce: bool = None,
-        log_reduce_black_list: List[str] = None,
-        log_reduce_white_list: List[str] = None,
-        max_text_len: int = None,
-        ttl: int = None,
+        body: Index = None,
     ):
-        # The
-        self.
-        # The configuration of full-text indexes.
-        self.line = line
-        # Specifies whether to turn on LogReduce. If you turn on LogReduce, only one of `log_reduce_white_list` and `log_reduce_black_list` takes effect.
-        self.log_reduce = log_reduce
-        # The fields in the blacklist that you want to use to cluster logs.
-        self.log_reduce_black_list = log_reduce_black_list
-        # The fields in the whitelist that you want to use to cluster logs.
-        self.log_reduce_white_list = log_reduce_white_list
-        # The maximum length of a field value that can be retained.
-        self.max_text_len = max_text_len
-        # The retention period of data. Unit: days. Valid values: 7, 30, and 90.
-        #
-        # This parameter is required.
-        self.ttl = ttl
+        # The request body.
+        self.body = body

     def validate(self):
-        if self.
-
-            if v:
-                v.validate()
-        if self.line:
-            self.line.validate()
+        if self.body:
+            self.body.validate()

     def to_map(self):
         _map = super().to_map()
@@ -18278,44 +18391,15 @@ class UpdateIndexRequest(TeaModel):
             return _map

         result = dict()
-
-
-            for k, v in self.keys.items():
-                result['keys'][k] = v.to_map()
-        if self.line is not None:
-            result['line'] = self.line.to_map()
-        if self.log_reduce is not None:
-            result['log_reduce'] = self.log_reduce
-        if self.log_reduce_black_list is not None:
-            result['log_reduce_black_list'] = self.log_reduce_black_list
-        if self.log_reduce_white_list is not None:
-            result['log_reduce_white_list'] = self.log_reduce_white_list
-        if self.max_text_len is not None:
-            result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
+        if self.body is not None:
+            result['body'] = self.body.to_map()
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-
-
-
-                temp_model = KeysValue()
-                self.keys[k] = temp_model.from_map(v)
-        if m.get('line') is not None:
-            temp_model = UpdateIndexRequestLine()
-            self.line = temp_model.from_map(m['line'])
-        if m.get('log_reduce') is not None:
-            self.log_reduce = m.get('log_reduce')
-        if m.get('log_reduce_black_list') is not None:
-            self.log_reduce_black_list = m.get('log_reduce_black_list')
-        if m.get('log_reduce_white_list') is not None:
-            self.log_reduce_white_list = m.get('log_reduce_white_list')
-        if m.get('max_text_len') is not None:
-            self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
+        if m.get('body') is not None:
+            temp_model = Index()
+            self.body = temp_model.from_map(m['body'])
         return self


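UpdateIndexRequest drops the flattened keys/line/log_reduce/ttl parameters (and the dedicated UpdateIndexRequestLine model) in favor of a single body of type Index, presumably the same Index model used elsewhere in the SDK alongside the new IndexJsonKey class in this release. A migration sketch; the assumption that Index carries the former fields (ttl, line, keys, and so on) comes from the parameter names removed here, not from the Index definition itself, which is outside this excerpt.

```python
from alibabacloud_sls20201230 import models as sls_models

# Assumed: Index exposes the fields that UpdateIndexRequest used to take directly.
index = sls_models.Index()        # populate ttl, line, keys, log_reduce, ... as needed
request = sls_models.UpdateIndexRequest(body=index)
print(request.to_map())           # {'body': {...}}
```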
@@ -18364,7 +18448,6 @@ class UpdateLogStoreRequest(TeaModel):
         logstore_name: str = None,
         max_split_shard: int = None,
         mode: str = None,
-        processor_id: str = None,
         shard_count: int = None,
         telemetry_type: str = None,
         ttl: int = None,
@@ -18403,7 +18486,6 @@ class UpdateLogStoreRequest(TeaModel):
         # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
         # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
         self.mode = mode
-        self.processor_id = processor_id
         # The number of shards.
         #
         # > You cannot call the UpdateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
@@ -18446,8 +18528,6 @@ class UpdateLogStoreRequest(TeaModel):
             result['maxSplitShard'] = self.max_split_shard
         if self.mode is not None:
             result['mode'] = self.mode
-        if self.processor_id is not None:
-            result['processorId'] = self.processor_id
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
         if self.telemetry_type is not None:
@@ -18477,8 +18557,6 @@ class UpdateLogStoreRequest(TeaModel):
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('mode') is not None:
             self.mode = m.get('mode')
-        if m.get('processorId') is not None:
-            self.processor_id = m.get('processorId')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
         if m.get('telemetryType') is not None:
@@ -18521,6 +18599,120 @@ class UpdateLogStoreResponse(TeaModel):
         return self


+class UpdateLogStoreEncryptionRequestUserCmkInfo(TeaModel):
+    def __init__(
+        self,
+        key_id: str = None,
+        region_id: str = None,
+        role_arn: str = None,
+    ):
+        self.key_id = key_id
+        self.region_id = region_id
+        self.role_arn = role_arn
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.key_id is not None:
+            result['keyId'] = self.key_id
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        if self.role_arn is not None:
+            result['roleArn'] = self.role_arn
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('keyId') is not None:
+            self.key_id = m.get('keyId')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        if m.get('roleArn') is not None:
+            self.role_arn = m.get('roleArn')
+        return self
+
+
+class UpdateLogStoreEncryptionRequest(TeaModel):
+    def __init__(
+        self,
+        enable: bool = None,
+        encrypt_type: str = None,
+        user_cmk_info: UpdateLogStoreEncryptionRequestUserCmkInfo = None,
+    ):
+        # This parameter is required.
+        self.enable = enable
+        self.encrypt_type = encrypt_type
+        self.user_cmk_info = user_cmk_info
+
+    def validate(self):
+        if self.user_cmk_info:
+            self.user_cmk_info.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.enable is not None:
+            result['enable'] = self.enable
+        if self.encrypt_type is not None:
+            result['encryptType'] = self.encrypt_type
+        if self.user_cmk_info is not None:
+            result['userCmkInfo'] = self.user_cmk_info.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('enable') is not None:
+            self.enable = m.get('enable')
+        if m.get('encryptType') is not None:
+            self.encrypt_type = m.get('encryptType')
+        if m.get('userCmkInfo') is not None:
+            temp_model = UpdateLogStoreEncryptionRequestUserCmkInfo()
+            self.user_cmk_info = temp_model.from_map(m['userCmkInfo'])
+        return self
+
+
+class UpdateLogStoreEncryptionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class UpdateLogStoreMeteringModeRequest(TeaModel):
     def __init__(
         self,
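The three new classes above model a dedicated UpdateLogStoreEncryption operation: a required enable flag, an optional encryptType, and an optional user CMK block (KMS key ID, region, RAM role ARN) for bring-your-own-key setups. A sketch with placeholder identifiers; valid encryptType values are not listed in this diff.

```python
from alibabacloud_sls20201230 import models as sls_models

request = sls_models.UpdateLogStoreEncryptionRequest(
    enable=True,                 # required
    encrypt_type='default',      # placeholder; valid values are not listed in this diff
    user_cmk_info=sls_models.UpdateLogStoreEncryptionRequestUserCmkInfo(
        key_id='<kms-key-id>',
        region_id='cn-hangzhou',
        role_arn='acs:ram::<account-id>:role/<role-name>',
    ),
)
print(request.to_map())
```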
@@ -18724,24 +18916,49 @@ class UpdateLogtailPipelineConfigRequest(TeaModel):
         processors: List[Dict[str, Any]] = None,
     ):
         # The aggregation plug-ins.
+        #
+        # > This parameter takes effect only when extended plug-ins are used. You can use only one aggregation plug-in.
         self.aggregators = aggregators
         # The name of the configuration.
         #
+        # > The value of this parameter must be the same as the value of configName in the outer layer.
+        #
         # This parameter is required.
         self.config_name = config_name
-        # The
+        # The output plug-ins.
+        #
+        # > You can use only one Simple Log Service output plug-in.
         #
         # This parameter is required.
         self.flushers = flushers
-        # The global
+        # The global settings.
+        #
+        # **\
+        #
+        # ****\
         self.global_ = global_
-        # The
+        # The input plug-ins.
+        #
+        # > You can configure only one input plug-in.
         #
         # This parameter is required.
         self.inputs = inputs
-        # The sample log.
+        # The sample log. You can specify multiple sample logs.
         self.log_sample = log_sample
         # The processing plug-ins.
+        #
+        # > Logtail supports native plug-ins and extended plug-ins for data processing. For more information, see [Logtail plug-ins overview](https://help.aliyun.com/document_detail/64957.html).
+        #
+        # >
+        #
+        # * You can use native plug-ins only to collect text logs.
+        #
+        # * You cannot add native plug-ins and extended plug-ins at the same time.
+        #
+        # * When you add native plug-ins, take note of the following items:
+        #
+        # * You must add one of the following Logtail plug-ins for data processing as the first plug-in: Data Parsing (Regex Mode), Data Parsing (Delimiter Mode), Data Parsing (JSON Mode), Data Parsing (NGINX Mode), Data Parsing (Apache Mode), and Data Parsing (IIS Mode).
+        # * After you add the first plug-in, you can add one Time Parsing plug-in, one Data Filtering plug-in, and multiple Data Masking plug-ins.
         self.processors = processors

     def validate(self):
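The expanded comments spell out the pipeline constraints: exactly one input plug-in, exactly one Simple Log Service flusher, at most one aggregation plug-in (extended mode only), and a required ordering when native processing plug-ins are used. A sketch of a config that respects them; the plug-in Type names (input_file, processor_parse_json_native, flusher_sls) and their option keys are drawn from Logtail documentation and are assumptions here, not values taken from this diff.

```python
from alibabacloud_sls20201230 import models as sls_models

request = sls_models.UpdateLogtailPipelineConfigRequest(
    config_name='my-pipeline',   # must match the configName in the outer layer
    inputs=[{                    # exactly one input plug-in
        'Type': 'input_file',                      # assumed plug-in name
        'FilePaths': ['/var/log/app/*.log'],
    }],
    processors=[{                # a native Data Parsing plug-in must come first
        'Type': 'processor_parse_json_native',     # assumed plug-in name
        'SourceKey': 'content',
    }],
    flushers=[{                  # exactly one Simple Log Service output plug-in
        'Type': 'flusher_sls',                     # assumed plug-in name
        'Logstore': 'my-logstore',
    }],
    log_sample='{"level": "INFO", "msg": "hello"}',
)
```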
@@ -19037,9 +19254,13 @@ class UpdateMetricStoreRequest(TeaModel):
         mode: str = None,
         ttl: int = None,
     ):
+        # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
+        # The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
         self.max_split_shard = max_split_shard
+        # The type of the Metricstore.
         self.mode = mode
+        # The retention period of the metric data. Unit: days.
         self.ttl = ttl

     def validate(self):
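The new comments document the four UpdateMetricStoreRequest fields; a short sketch using them (the concrete values are placeholders):

```python
from alibabacloud_sls20201230 import models as sls_models

request = sls_models.UpdateMetricStoreRequest(
    auto_split=True,       # enable automatic sharding
    max_split_shard=64,    # only honored when auto_split is true
    mode='standard',       # Metricstore type; valid values are not listed in this diff
    ttl=30,                # retention period of the metric data, in days
)
print(request.to_map())
```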
@@ -19881,9 +20102,7 @@ class UpdateSavedSearchRequest(TeaModel):
         #
         # This parameter is required.
         self.savedsearch_name = savedsearch_name
-        # The
-        #
-        # For more information, see Log search overview and Log analysis overview.
+        # The query statement of the saved search. A query statement consists of a search statement and an analytic statement in the Search statement|Analytic statement format. For more information, see [Log search overview](https://help.aliyun.com/document_detail/43772.html) and [Log analysis overview](https://help.aliyun.com/document_detail/53608.html).
         #
         # This parameter is required.
         self.search_query = search_query
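The rewritten comment pins down the query format: a search statement and an analytic statement joined by a vertical bar. A sketch setting only the fields visible in this hunk; UpdateSavedSearchRequest has additional required parameters that fall outside this excerpt, and the query text below is a placeholder.

```python
from alibabacloud_sls20201230 import models as sls_models

request = sls_models.UpdateSavedSearchRequest(
    savedsearch_name='errors-by-status',
    # Search statement | Analytic statement
    search_query="status >= 500 | SELECT status, COUNT(*) AS cnt GROUP BY status",
)
```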