alibabacloud-sls20201230 5.4.1-py3-none-any.whl → 5.5.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +1024 -103
- alibabacloud_sls20201230/models.py +691 -101
- {alibabacloud_sls20201230-5.4.1.dist-info → alibabacloud_sls20201230-5.5.1.dist-info}/METADATA +5 -5
- alibabacloud_sls20201230-5.5.1.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.4.1.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.4.1.dist-info → alibabacloud_sls20201230-5.5.1.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.4.1.dist-info → alibabacloud_sls20201230-5.5.1.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.4.1.dist-info → alibabacloud_sls20201230-5.5.1.dist-info}/top_level.txt +0 -0
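
The headline changes visible in the models.py diff below: LogGroup now carries its entries in log_items (wire name 'LogItems') instead of the former logs/'Logs' field, and new request/response models are added for PullLogs, PutLogs, DescribeRegions, and UpdateLogStoreEncryption. A minimal sketch of how the new models fit together follows; it uses only constructors that appear in this diff, the sls_models alias is ours, and the final client call is an assumption (the matching client.py changes are not shown here).

# Sketch based on the 5.5.1 model changes shown in the models.py diff below.
# Only the model constructors are confirmed by this diff; the client call is assumed.
from alibabacloud_sls20201230 import models as sls_models

# LogGroup now exposes `log_items` (serialized as 'LogItems'), replacing `logs`/'Logs'.
log_group = sls_models.LogGroup(
    log_items=[sls_models.LogItem()],  # populate LogItem fields as needed (not shown in this diff)
    log_tags=[sls_models.LogTag(key='env', value='prod')],
    source='127.0.0.1',
    topic='demo',
)

# New in 5.5.1: PutLogs request/headers models; the request body is the LogGroup above.
put_request = sls_models.PutLogsRequest(body=log_group)
put_headers = sls_models.PutLogsHeaders(x_log_compresstype='lz4')

# New in 5.5.1: PullLogs request model; its response body is the new LogGroupList model.
pull_request = sls_models.PullLogsRequest(count=100, cursor='<cursor>')

# Passing these objects to the corresponding PutLogs/PullLogs operations on the
# generated client is assumed here, not demonstrated by this models.py diff.
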
@@ -878,7 +878,7 @@ class ConsumeProcessor(TeaModel):
         configuration: ConsumeProcessorConfiguration = None,
         create_time: int = None,
         description: str = None,
-
+        display_name: str = None,
         processor_name: str = None,
         update_time: int = None,
     ):
@@ -887,7 +887,7 @@ class ConsumeProcessor(TeaModel):
         self.create_time = create_time
         self.description = description
         # This parameter is required.
-        self.
+        self.display_name = display_name
         # This parameter is required.
         self.processor_name = processor_name
         self.update_time = update_time
@@ -908,8 +908,8 @@ class ConsumeProcessor(TeaModel):
             result['createTime'] = self.create_time
         if self.description is not None:
             result['description'] = self.description
-        if self.
-            result['
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
         if self.processor_name is not None:
             result['processorName'] = self.processor_name
         if self.update_time is not None:
@@ -925,8 +925,8 @@ class ConsumeProcessor(TeaModel):
             self.create_time = m.get('createTime')
         if m.get('description') is not None:
             self.description = m.get('description')
-        if m.get('
-            self.
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
         if m.get('processorName') is not None:
             self.processor_name = m.get('processorName')
         if m.get('updateTime') is not None:
@@ -1526,7 +1526,7 @@ class IngestProcessor(TeaModel):
         configuration: IngestProcessorConfiguration = None,
         create_time: int = None,
         description: str = None,
-
+        display_name: str = None,
         processor_name: str = None,
         update_time: int = None,
     ):
@@ -1535,7 +1535,7 @@ class IngestProcessor(TeaModel):
         self.create_time = create_time
         self.description = description
         # This parameter is required.
-        self.
+        self.display_name = display_name
         # This parameter is required.
         self.processor_name = processor_name
         self.update_time = update_time
@@ -1556,8 +1556,8 @@ class IngestProcessor(TeaModel):
             result['createTime'] = self.create_time
         if self.description is not None:
             result['description'] = self.description
-        if self.
-            result['
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
         if self.processor_name is not None:
             result['processorName'] = self.processor_name
         if self.update_time is not None:
@@ -1573,8 +1573,8 @@ class IngestProcessor(TeaModel):
             self.create_time = m.get('createTime')
         if m.get('description') is not None:
             self.description = m.get('description')
-        if m.get('
-            self.
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
         if m.get('processorName') is not None:
             self.processor_name = m.get('processorName')
         if m.get('updateTime') is not None:
@@ -1617,41 +1617,6 @@ class LogContent(TeaModel):
         return self


-class LogTag(TeaModel):
-    def __init__(
-        self,
-        key: str = None,
-        value: str = None,
-    ):
-        # This parameter is required.
-        self.key = key
-        # This parameter is required.
-        self.value = value
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.key is not None:
-            result['Key'] = self.key
-        if self.value is not None:
-            result['Value'] = self.value
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('Key') is not None:
-            self.key = m.get('Key')
-        if m.get('Value') is not None:
-            self.value = m.get('Value')
-        return self
-
-
 class LogItem(TeaModel):
     def __init__(
         self,
@@ -1695,29 +1660,62 @@ class LogItem(TeaModel):
         return self


+class LogTag(TeaModel):
+    def __init__(
+        self,
+        key: str = None,
+        value: str = None,
+    ):
+        # This parameter is required.
+        self.key = key
+        # This parameter is required.
+        self.value = value
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.key is not None:
+            result['Key'] = self.key
+        if self.value is not None:
+            result['Value'] = self.value
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('Key') is not None:
+            self.key = m.get('Key')
+        if m.get('Value') is not None:
+            self.value = m.get('Value')
+        return self
+
+
 class LogGroup(TeaModel):
     def __init__(
         self,
+        log_items: List[LogItem] = None,
         log_tags: List[LogTag] = None,
-        logs: List[LogItem] = None,
         source: str = None,
         topic: str = None,
     ):
         # This parameter is required.
+        self.log_items = log_items
         self.log_tags = log_tags
-        # This parameter is required.
-        self.logs = logs
         self.source = source
-        # This parameter is required.
         self.topic = topic

     def validate(self):
-        if self.
-            for k in self.
+        if self.log_items:
+            for k in self.log_items:
                 if k:
                     k.validate()
-        if self.
-            for k in self.
+        if self.log_tags:
+            for k in self.log_tags:
                 if k:
                     k.validate()

@@ -1727,14 +1725,14 @@ class LogGroup(TeaModel):
             return _map

         result = dict()
+        result['LogItems'] = []
+        if self.log_items is not None:
+            for k in self.log_items:
+                result['LogItems'].append(k.to_map() if k else None)
         result['LogTags'] = []
         if self.log_tags is not None:
             for k in self.log_tags:
                 result['LogTags'].append(k.to_map() if k else None)
-        result['Logs'] = []
-        if self.logs is not None:
-            for k in self.logs:
-                result['Logs'].append(k.to_map() if k else None)
         if self.source is not None:
             result['Source'] = self.source
         if self.topic is not None:
@@ -1743,16 +1741,16 @@ class LogGroup(TeaModel):

     def from_map(self, m: dict = None):
         m = m or dict()
+        self.log_items = []
+        if m.get('LogItems') is not None:
+            for k in m.get('LogItems'):
+                temp_model = LogItem()
+                self.log_items.append(temp_model.from_map(k))
         self.log_tags = []
         if m.get('LogTags') is not None:
             for k in m.get('LogTags'):
                 temp_model = LogTag()
                 self.log_tags.append(temp_model.from_map(k))
-        self.logs = []
-        if m.get('Logs') is not None:
-            for k in m.get('Logs'):
-                temp_model = LogItem()
-                self.logs.append(temp_model.from_map(k))
         if m.get('Source') is not None:
             self.source = m.get('Source')
         if m.get('Topic') is not None:
@@ -1760,18 +1758,56 @@ class LogGroup(TeaModel):
         return self


+class LogGroupList(TeaModel):
+    def __init__(
+        self,
+        log_group_list: List[LogGroup] = None,
+    ):
+        # This parameter is required.
+        self.log_group_list = log_group_list
+
+    def validate(self):
+        if self.log_group_list:
+            for k in self.log_group_list:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['logGroupList'] = []
+        if self.log_group_list is not None:
+            for k in self.log_group_list:
+                result['logGroupList'].append(k.to_map() if k else None)
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.log_group_list = []
+        if m.get('logGroupList') is not None:
+            for k in m.get('logGroupList'):
+                temp_model = LogGroup()
+                self.log_group_list.append(temp_model.from_map(k))
+        return self
+
+
 class LogtailConfigOutputDetail(TeaModel):
     def __init__(
         self,
         endpoint: str = None,
         logstore_name: str = None,
         region: str = None,
+        telemetry_type: str = None,
     ):
         # This parameter is required.
         self.endpoint = endpoint
         # This parameter is required.
         self.logstore_name = logstore_name
         self.region = region
+        self.telemetry_type = telemetry_type

     def validate(self):
         pass
@@ -1788,6 +1824,8 @@ class LogtailConfigOutputDetail(TeaModel):
             result['logstoreName'] = self.logstore_name
         if self.region is not None:
             result['region'] = self.region
+        if self.telemetry_type is not None:
+            result['telemetryType'] = self.telemetry_type
         return result

     def from_map(self, m: dict = None):
@@ -1798,6 +1836,8 @@ class LogtailConfigOutputDetail(TeaModel):
             self.logstore_name = m.get('logstoreName')
         if m.get('region') is not None:
             self.region = m.get('region')
+        if m.get('telemetryType') is not None:
+            self.telemetry_type = m.get('telemetryType')
         return self


@@ -4202,11 +4242,13 @@ class Logstore(TeaModel):
 class Machine(TeaModel):
     def __init__(
         self,
+        host_id: str = None,
         ip: str = None,
         last_heartbeat_time: int = None,
         machine_uniqueid: str = None,
         userdefined_id: str = None,
     ):
+        self.host_id = host_id
         self.ip = ip
         self.last_heartbeat_time = last_heartbeat_time
         self.machine_uniqueid = machine_uniqueid
@@ -4221,6 +4263,8 @@ class Machine(TeaModel):
             return _map

         result = dict()
+        if self.host_id is not None:
+            result['host-id'] = self.host_id
         if self.ip is not None:
             result['ip'] = self.ip
         if self.last_heartbeat_time is not None:
@@ -4233,6 +4277,8 @@ class Machine(TeaModel):

     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('host-id') is not None:
+            self.host_id = m.get('host-id')
         if m.get('ip') is not None:
             self.ip = m.get('ip')
         if m.get('lastHeartbeatTime') is not None:
@@ -5697,17 +5743,17 @@ class CreateLogStoreRequest(TeaModel):
         telemetry_type: str = None,
         ttl: int = None,
     ):
-        # Specifies whether to record public IP
+        # Specifies whether to record the **public IP address** and **log receiving time**. Default value: false. Valid values:
         #
-        # * true
-        # * false
+        # * true********\
+        # * false********\
         self.append_meta = append_meta
-        # Specifies whether to enable automatic sharding.
+        # Specifies whether to enable automatic sharding. Valid values:
         #
         # * true
         # * false
         self.auto_split = auto_split
-        # Specifies whether to enable the web tracking feature. Default value: false.
+        # Specifies whether to enable the web tracking feature. Default value: false. Valid values:
         #
         # * true
         # * false
@@ -5731,12 +5777,12 @@ class CreateLogStoreRequest(TeaModel):
         self.logstore_name = logstore_name
         # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 256.
         #
-        # > If you set autoSplit to true, you must specify
+        # > If you set autoSplit to true, you must specify this parameter.
         self.max_split_shard = max_split_shard
         # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores. Valid values:
         #
         # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
-        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a
+        # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
         self.mode = mode
         self.processor_id = processor_id
         # The number of shards.
@@ -5750,7 +5796,7 @@ class CreateLogStoreRequest(TeaModel):
         # * **None** (default): log data
         # * **Metrics**: metric data
         self.telemetry_type = telemetry_type
-        # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650,
+        # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650, data is permanently stored.
         #
         # This parameter is required.
         self.ttl = ttl
@@ -6249,14 +6295,24 @@ class CreateMetricStoreRequest(TeaModel):
         shard_count: int = None,
         ttl: int = None,
     ):
+        # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
+        # The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
         self.max_split_shard = max_split_shard
+        # The type of the metric data. Example: prometheus.
         self.metric_type = metric_type
+        # The type of the Metricstore. For example, you can set the parameter to standard to query Standard Metricstores.
         self.mode = mode
+        # The name of the Metricstore.
+        #
         # This parameter is required.
         self.name = name
+        # The number of shards in the Metricstore.
+        #
         # This parameter is required.
         self.shard_count = shard_count
+        # The retention period of the metric data in the Metricstore. Unit: days.
+        #
         # This parameter is required.
         self.ttl = ttl

@@ -7105,11 +7161,13 @@ class CreateSavedSearchRequest(TeaModel):
         #
         # This parameter is required.
         self.savedsearch_name = savedsearch_name
-        # The query statement of the saved search. A query statement consists of a search statement and an analytic statement in the `Search statement|Analytic statement` format. For more information
+        # The query statement of the saved search. A query statement consists of a search statement and an analytic statement in the `Search statement|Analytic statement` format. For more information, see [Log search overview](https://help.aliyun.com/document_detail/43772.html) and [Log analysis overview](https://help.aliyun.com/document_detail/53608.html).
         #
         # This parameter is required.
         self.search_query = search_query
-        # The topic of the
+        # The topic of the logs.
+        #
+        # This parameter is required.
         self.topic = topic

     def validate(self):
@@ -7446,6 +7504,7 @@ class CreateTicketRequest(TeaModel):
         expiration_time: int = None,
     ):
         self.access_token_expiration_time = access_token_expiration_time
+        # The validity period of the ticket that is used for logon-free access. Unit: seconds. Default value: 86400. Maximum value: 2592000. The value 86400 specifies one day.
         self.expiration_time = expiration_time

     def validate(self):
@@ -7477,6 +7536,7 @@ class CreateTicketResponseBody(TeaModel):
         self,
         ticket: str = None,
     ):
+        # The ticket that is used for logon-free access.
         self.ticket = ticket

     def validate(self):
@@ -8431,6 +8491,154 @@ class DeleteStoreViewResponse(TeaModel):
         return self


+class DescribeRegionsRequest(TeaModel):
+    def __init__(
+        self,
+        language: str = None,
+    ):
+        self.language = language
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.language is not None:
+            result['language'] = self.language
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('language') is not None:
+            self.language = m.get('language')
+        return self
+
+
+class DescribeRegionsResponseBodyRegions(TeaModel):
+    def __init__(
+        self,
+        internet_endpoint: str = None,
+        intranet_endpoint: str = None,
+        local_name: str = None,
+        region: str = None,
+    ):
+        self.internet_endpoint = internet_endpoint
+        self.intranet_endpoint = intranet_endpoint
+        self.local_name = local_name
+        self.region = region
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.internet_endpoint is not None:
+            result['internetEndpoint'] = self.internet_endpoint
+        if self.intranet_endpoint is not None:
+            result['intranetEndpoint'] = self.intranet_endpoint
+        if self.local_name is not None:
+            result['localName'] = self.local_name
+        if self.region is not None:
+            result['region'] = self.region
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('internetEndpoint') is not None:
+            self.internet_endpoint = m.get('internetEndpoint')
+        if m.get('intranetEndpoint') is not None:
+            self.intranet_endpoint = m.get('intranetEndpoint')
+        if m.get('localName') is not None:
+            self.local_name = m.get('localName')
+        if m.get('region') is not None:
+            self.region = m.get('region')
+        return self
+
+
+class DescribeRegionsResponseBody(TeaModel):
+    def __init__(
+        self,
+        regions: List[DescribeRegionsResponseBodyRegions] = None,
+    ):
+        self.regions = regions
+
+    def validate(self):
+        if self.regions:
+            for k in self.regions:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['regions'] = []
+        if self.regions is not None:
+            for k in self.regions:
+                result['regions'].append(k.to_map() if k else None)
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.regions = []
+        if m.get('regions') is not None:
+            for k in m.get('regions'):
+                temp_model = DescribeRegionsResponseBodyRegions()
+                self.regions.append(temp_model.from_map(k))
+        return self
+
+
+class DescribeRegionsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: DescribeRegionsResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = DescribeRegionsResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class DisableAlertResponse(TeaModel):
     def __init__(
         self,
@@ -9411,7 +9619,6 @@ class GetContextLogsRequest(TeaModel):
         forward_lines: int = None,
         pack_id: str = None,
         pack_meta: str = None,
-        type: str = None,
     ):
         # The number of logs that you want to obtain and are generated before the generation time of the start log. Valid values: `(0,100]`.
         #
@@ -9429,10 +9636,6 @@ class GetContextLogsRequest(TeaModel):
         #
         # This parameter is required.
         self.pack_meta = pack_meta
-        # The type of the data in the Logstore. Set the value to context_log.
-        #
-        # This parameter is required.
-        self.type = type

     def validate(self):
         pass
@@ -9451,8 +9654,6 @@ class GetContextLogsRequest(TeaModel):
             result['pack_id'] = self.pack_id
         if self.pack_meta is not None:
             result['pack_meta'] = self.pack_meta
-        if self.type is not None:
-            result['type'] = self.type
         return result

     def from_map(self, m: dict = None):
@@ -9465,8 +9666,6 @@ class GetContextLogsRequest(TeaModel):
             self.pack_id = m.get('pack_id')
         if m.get('pack_meta') is not None:
             self.pack_meta = m.get('pack_meta')
-        if m.get('type') is not None:
-            self.type = m.get('type')
         return self


@@ -9950,6 +10149,7 @@ class GetDownloadJobResponseBodyExecutionDetails(TeaModel):
         file_path: str = None,
         file_size: int = None,
         log_count: int = None,
+        notice: str = None,
         progress: int = None,
     ):
         self.check_sum = check_sum
@@ -9963,6 +10163,7 @@ class GetDownloadJobResponseBodyExecutionDetails(TeaModel):
         self.file_size = file_size
         # 下载日志条数
         self.log_count = log_count
+        self.notice = notice
         # 下载进度
         self.progress = progress

@@ -9987,6 +10188,8 @@ class GetDownloadJobResponseBodyExecutionDetails(TeaModel):
             result['fileSize'] = self.file_size
         if self.log_count is not None:
             result['logCount'] = self.log_count
+        if self.notice is not None:
+            result['notice'] = self.notice
         if self.progress is not None:
             result['progress'] = self.progress
         return result
@@ -10005,6 +10208,8 @@ class GetDownloadJobResponseBodyExecutionDetails(TeaModel):
             self.file_size = m.get('fileSize')
         if m.get('logCount') is not None:
             self.log_count = m.get('logCount')
+        if m.get('notice') is not None:
+            self.notice = m.get('notice')
         if m.get('progress') is not None:
             self.progress = m.get('progress')
         return self
@@ -10751,9 +10956,9 @@ class GetLogsRequest(TeaModel):
         #
         # This parameter is required.
         self.from_ = from_
-        # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement. Minimum value: 0. Maximum value: 100. Default value: 100.
+        # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement. Minimum value: 0. Maximum value: 100. Default value: 100. For more information, see [Perform paged queries](https://help.aliyun.com/document_detail/89994.html).
         self.line = line
-        # The line from which the query starts. This parameter takes effect only when the query parameter is set to a search statement. Default value: 0.
+        # The line from which the query starts. This parameter takes effect only when the query parameter is set to a search statement. Default value: 0. For more information, see [Perform paged queries](https://help.aliyun.com/document_detail/89994.html).
         self.offset = offset
         # Specifies whether to enable the Dedicated SQL feature. For more information, see [Enable Dedicated SQL](https://help.aliyun.com/document_detail/223777.html). Valid values:
         #
@@ -10790,7 +10995,7 @@ class GetLogsRequest(TeaModel):
         #
         # This parameter is required.
         self.to = to
-        # The topic of the logs. The default value is
+        # The topic of the logs. The default value is an empty string. For more information, see [Topic](https://help.aliyun.com/document_detail/48881.html).
         self.topic = topic

     def validate(self):
@@ -10887,7 +11092,10 @@ class GetLogsV2Headers(TeaModel):
         accept_encoding: str = None,
     ):
         self.common_headers = common_headers
-        # The compression
+        # The compression format.
+        #
+        # * For Java, Python, and Go, only the lz4 and gzip algorithms are supported for decompression.
+        # * For PHP, JavaScript, and C#, only the gzip algorithm is supported for decompression.
         #
         # This parameter is required.
         self.accept_encoding = accept_encoding
@@ -11535,14 +11743,23 @@ class GetMetricStoreResponseBody(TeaModel):
         shard_count: int = None,
         ttl: int = None,
     ):
+        # Indicates whether the automatic sharding feature is enabled.
         self.auto_split = auto_split
+        # The creation time. The value is a UNIX timestamp.
         self.create_time = create_time
+        # The last update time. The value is a UNIX timestamp.
         self.last_modify_time = last_modify_time
+        # The maximum number of shards into which existing shards can be automatically split.
         self.max_split_shard = max_split_shard
+        # The metric type of the Metricstore. Example: prometheus.
         self.metric_type = metric_type
+        # The specification type of the Metricstore. Example: standard.
         self.mode = mode
+        # The name of the Metricstore.
         self.name = name
+        # The number of shards.
         self.shard_count = shard_count
+        # The retention period. Unit: days.
         self.ttl = ttl

     def validate(self):
@@ -12996,6 +13213,7 @@ class ListCollectionPoliciesResponseBodyDataCentralizeConfig(TeaModel):
         self.dest_logstore = dest_logstore
         self.dest_project = dest_project
         self.dest_region = dest_region
+        # The data retention period for centralized storage. Unit: days.
         self.dest_ttl = dest_ttl

     def validate(self):
@@ -13156,6 +13374,7 @@ class ListCollectionPoliciesResponseBodyData(TeaModel):
         product_code: str = None,
         resource_directory: ListCollectionPoliciesResponseBodyDataResourceDirectory = None,
     ):
+        # The configuration for centralized storage.
         self.centralize_config = centralize_config
         self.centralize_enabled = centralize_enabled
         self.data_code = data_code
@@ -13322,6 +13541,7 @@ class ListCollectionPoliciesResponseBody(TeaModel):
         total_count: int = None,
     ):
         self.current_count = current_count
+        # The data of the policies that are matched against the query conditions. The data is returned based on paginated results.
         self.data = data
         self.statistics = statistics
         self.total_count = total_count
@@ -14060,6 +14280,7 @@ class ListDownloadJobsResponseBodyResultsExecutionDetails(TeaModel):
         file_path: str = None,
         file_size: int = None,
         log_count: int = None,
+        notice: str = None,
         progress: int = None,
     ):
         self.check_sum = check_sum
@@ -14073,6 +14294,7 @@ class ListDownloadJobsResponseBodyResultsExecutionDetails(TeaModel):
         self.file_size = file_size
         # 下载日志条数
         self.log_count = log_count
+        self.notice = notice
         # 下载进度
         self.progress = progress

@@ -14097,6 +14319,8 @@ class ListDownloadJobsResponseBodyResultsExecutionDetails(TeaModel):
             result['fileSize'] = self.file_size
         if self.log_count is not None:
             result['logCount'] = self.log_count
+        if self.notice is not None:
+            result['notice'] = self.notice
         if self.progress is not None:
             result['progress'] = self.progress
         return result
@@ -14115,6 +14339,8 @@ class ListDownloadJobsResponseBodyResultsExecutionDetails(TeaModel):
             self.file_size = m.get('fileSize')
         if m.get('logCount') is not None:
             self.log_count = m.get('logCount')
+        if m.get('notice') is not None:
+            self.notice = m.get('notice')
         if m.get('progress') is not None:
             self.progress = m.get('progress')
         return self
@@ -14944,9 +15170,13 @@ class ListMetricStoresRequest(TeaModel):
         offset: int = None,
         size: int = None,
     ):
+        # The type of the Metricstore. For example, you can set the parameter to standard to query Standard Metricstores.
         self.mode = mode
+        # The name of the Metricstore. Fuzzy search is supported. If you do not specify this parameter, all Metricstores are involved.
         self.name = name
+        # The start position of the query.
         self.offset = offset
+        # The number of entries per page.
         self.size = size

     def validate(self):
@@ -14988,8 +15218,11 @@ class ListMetricStoresResponseBody(TeaModel):
         metricstores: List[str] = None,
         total: int = None,
     ):
+        # The total number of entries returned.
         self.count = count
+        # The names of the Metricstores.
         self.metricstores = metricstores
+        # The total number of queried Metricstores.
         self.total = total

     def validate(self):
@@ -16403,6 +16636,128 @@ class OpenSlsServiceResponse(TeaModel):
         return self


+class PullLogsHeaders(TeaModel):
+    def __init__(
+        self,
+        common_headers: Dict[str, str] = None,
+        accept_encoding: str = None,
+    ):
+        self.common_headers = common_headers
+        self.accept_encoding = accept_encoding
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.common_headers is not None:
+            result['commonHeaders'] = self.common_headers
+        if self.accept_encoding is not None:
+            result['Accept-Encoding'] = self.accept_encoding
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('commonHeaders') is not None:
+            self.common_headers = m.get('commonHeaders')
+        if m.get('Accept-Encoding') is not None:
+            self.accept_encoding = m.get('Accept-Encoding')
+        return self
+
+
+class PullLogsRequest(TeaModel):
+    def __init__(
+        self,
+        count: int = None,
+        cursor: str = None,
+        end_cursor: str = None,
+        query: str = None,
+    ):
+        # This parameter is required.
+        self.count = count
+        # This parameter is required.
+        self.cursor = cursor
+        self.end_cursor = end_cursor
+        # The SPL statement that is used to filter data. For more information, see [SPL instructions](https://help.aliyun.com/document_detail/2536530.html).
+        self.query = query
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.count is not None:
+            result['count'] = self.count
+        if self.cursor is not None:
+            result['cursor'] = self.cursor
+        if self.end_cursor is not None:
+            result['end_cursor'] = self.end_cursor
+        if self.query is not None:
+            result['query'] = self.query
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('count') is not None:
+            self.count = m.get('count')
+        if m.get('cursor') is not None:
+            self.cursor = m.get('cursor')
+        if m.get('end_cursor') is not None:
+            self.end_cursor = m.get('end_cursor')
+        if m.get('query') is not None:
+            self.query = m.get('query')
+        return self
+
+
+class PullLogsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: LogGroupList = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = LogGroupList()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class PutAnnotationDataRequest(TeaModel):
     def __init__(
         self,
@@ -16480,6 +16835,105 @@ class PutAnnotationDataResponse(TeaModel):
         return self


+class PutLogsHeaders(TeaModel):
+    def __init__(
+        self,
+        common_headers: Dict[str, str] = None,
+        x_log_compresstype: str = None,
+    ):
+        self.common_headers = common_headers
+        # The compression format. lz4 and gzip are supported.
+        #
+        # This parameter is required.
+        self.x_log_compresstype = x_log_compresstype
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.common_headers is not None:
+            result['commonHeaders'] = self.common_headers
+        if self.x_log_compresstype is not None:
+            result['x-log-compresstype'] = self.x_log_compresstype
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('commonHeaders') is not None:
+            self.common_headers = m.get('commonHeaders')
+        if m.get('x-log-compresstype') is not None:
+            self.x_log_compresstype = m.get('x-log-compresstype')
+        return self
+
+
+class PutLogsRequest(TeaModel):
+    def __init__(
+        self,
+        body: LogGroup = None,
+    ):
+        # The compressed Protobuf data.
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('body') is not None:
+            temp_model = LogGroup()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
+class PutLogsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class PutProjectPolicyRequest(TeaModel):
     def __init__(
         self,
@@ -16697,6 +17151,7 @@ class RefreshTokenRequest(TeaModel):
         ticket: str = None,
     ):
         self.access_token_expiration_time = access_token_expiration_time
+        # The ticket that is used for logon-free access.
         self.ticket = ticket

     def validate(self):
@@ -17766,7 +18221,7 @@ class UpdateDashboardRequest(TeaModel):
         description: str = None,
         display_name: str = None,
     ):
-        # The
+        # The attribute values of the dashboard.
         self.attribute = attribute
         # The charts on the dashboard.
         #
@@ -18011,7 +18466,6 @@ class UpdateLogStoreRequest(TeaModel):
         logstore_name: str = None,
         max_split_shard: int = None,
         mode: str = None,
-        processor_id: str = None,
         shard_count: int = None,
         telemetry_type: str = None,
         ttl: int = None,
@@ -18050,7 +18504,6 @@ class UpdateLogStoreRequest(TeaModel):
         # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
         # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
         self.mode = mode
-        self.processor_id = processor_id
         # The number of shards.
         #
         # > You cannot call the UpdateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
@@ -18093,8 +18546,6 @@ class UpdateLogStoreRequest(TeaModel):
             result['maxSplitShard'] = self.max_split_shard
         if self.mode is not None:
             result['mode'] = self.mode
-        if self.processor_id is not None:
-            result['processorId'] = self.processor_id
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
         if self.telemetry_type is not None:
@@ -18124,8 +18575,6 @@ class UpdateLogStoreRequest(TeaModel):
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('mode') is not None:
             self.mode = m.get('mode')
-        if m.get('processorId') is not None:
-            self.processor_id = m.get('processorId')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
         if m.get('telemetryType') is not None:
@@ -18168,6 +18617,120 @@ class UpdateLogStoreResponse(TeaModel):
         return self


+class UpdateLogStoreEncryptionRequestUserCmkInfo(TeaModel):
+    def __init__(
+        self,
+        key_id: str = None,
+        region_id: str = None,
+        role_arn: str = None,
+    ):
+        self.key_id = key_id
+        self.region_id = region_id
+        self.role_arn = role_arn
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.key_id is not None:
+            result['keyId'] = self.key_id
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        if self.role_arn is not None:
+            result['roleArn'] = self.role_arn
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('keyId') is not None:
+            self.key_id = m.get('keyId')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        if m.get('roleArn') is not None:
+            self.role_arn = m.get('roleArn')
+        return self
+
+
+class UpdateLogStoreEncryptionRequest(TeaModel):
+    def __init__(
+        self,
+        enable: bool = None,
+        encrypt_type: str = None,
+        user_cmk_info: UpdateLogStoreEncryptionRequestUserCmkInfo = None,
+    ):
+        # This parameter is required.
+        self.enable = enable
+        self.encrypt_type = encrypt_type
+        self.user_cmk_info = user_cmk_info
+
+    def validate(self):
+        if self.user_cmk_info:
+            self.user_cmk_info.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.enable is not None:
+            result['enable'] = self.enable
+        if self.encrypt_type is not None:
+            result['encryptType'] = self.encrypt_type
+        if self.user_cmk_info is not None:
+            result['userCmkInfo'] = self.user_cmk_info.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('enable') is not None:
+            self.enable = m.get('enable')
+        if m.get('encryptType') is not None:
+            self.encrypt_type = m.get('encryptType')
+        if m.get('userCmkInfo') is not None:
+            temp_model = UpdateLogStoreEncryptionRequestUserCmkInfo()
+            self.user_cmk_info = temp_model.from_map(m['userCmkInfo'])
+        return self
+
+
+class UpdateLogStoreEncryptionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class UpdateLogStoreMeteringModeRequest(TeaModel):
     def __init__(
         self,
@@ -18371,24 +18934,49 @@ class UpdateLogtailPipelineConfigRequest(TeaModel):
         processors: List[Dict[str, Any]] = None,
     ):
         # The aggregation plug-ins.
+        #
+        # > This parameter takes effect only when extended plug-ins are used. You can use only one aggregation plug-in.
         self.aggregators = aggregators
         # The name of the configuration.
         #
+        # > The value of this parameter must be the same as the value of configName in the outer layer.
+        #
         # This parameter is required.
         self.config_name = config_name
-        # The
+        # The output plug-ins.
+        #
+        # > You can use only one Simple Log Service output plug-in.
         #
         # This parameter is required.
         self.flushers = flushers
-        # The global
+        # The global settings.
+        #
+        # **\
+        #
+        # ****\
         self.global_ = global_
-        # The
+        # The input plug-ins.
+        #
+        # > You can configure only one input plug-in.
         #
         # This parameter is required.
         self.inputs = inputs
-        # The sample log.
+        # The sample log. You can specify multiple sample logs.
         self.log_sample = log_sample
         # The processing plug-ins.
+        #
+        # > Logtail supports native plug-ins and extended plug-ins for data processing. For more information, see [Logtail plug-ins overview](https://help.aliyun.com/document_detail/64957.html).
+        #
+        # >
+        #
+        # * You can use native plug-ins only to collect text logs.
+        #
+        # * You cannot add native plug-ins and extended plug-ins at the same time.
+        #
+        # * When you add native plug-ins, take note of the following items:
+        #
+        # * You must add one of the following Logtail plug-ins for data processing as the first plug-in: Data Parsing (Regex Mode), Data Parsing (Delimiter Mode), Data Parsing (JSON Mode), Data Parsing (NGINX Mode), Data Parsing (Apache Mode), and Data Parsing (IIS Mode).
+        # * After you add the first plug-in, you can add one Time Parsing plug-in, one Data Filtering plug-in, and multiple Data Masking plug-ins.
         self.processors = processors

     def validate(self):
@@ -18684,9 +19272,13 @@ class UpdateMetricStoreRequest(TeaModel):
         mode: str = None,
         ttl: int = None,
     ):
+        # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
+        # The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
         self.max_split_shard = max_split_shard
+        # The type of the Metricstore.
         self.mode = mode
+        # The retention period of the metric data. Unit: days.
         self.ttl = ttl

     def validate(self):
@@ -19528,9 +20120,7 @@ class UpdateSavedSearchRequest(TeaModel):
         #
         # This parameter is required.
         self.savedsearch_name = savedsearch_name
-        # The
-        #
-        # For more information, see Log search overview and Log analysis overview.
+        # The query statement of the saved search. A query statement consists of a search statement and an analytic statement in the Search statement|Analytic statement format. For more information, see [Log search overview](https://help.aliyun.com/document_detail/43772.html) and [Log analysis overview](https://help.aliyun.com/document_detail/53608.html).
         #
         # This parameter is required.
         self.search_query = search_query