alibabacloud-sls20201230 5.2.3__py3-none-any.whl → 5.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -844,6 +844,96 @@ class Alert(TeaModel):
844
844
  return self
845
845
 
846
846
 
847
+ class ConsumeProcessorConfiguration(TeaModel):
848
+ def __init__(
849
+ self,
850
+ spl: str = None,
851
+ ):
852
+ # This parameter is required.
853
+ self.spl = spl
854
+
855
+ def validate(self):
856
+ pass
857
+
858
+ def to_map(self):
859
+ _map = super().to_map()
860
+ if _map is not None:
861
+ return _map
862
+
863
+ result = dict()
864
+ if self.spl is not None:
865
+ result['spl'] = self.spl
866
+ return result
867
+
868
+ def from_map(self, m: dict = None):
869
+ m = m or dict()
870
+ if m.get('spl') is not None:
871
+ self.spl = m.get('spl')
872
+ return self
873
+
874
+
875
+ class ConsumeProcessor(TeaModel):
876
+ def __init__(
877
+ self,
878
+ configuration: ConsumeProcessorConfiguration = None,
879
+ create_time: int = None,
880
+ description: str = None,
881
+ processor_id: str = None,
882
+ processor_name: str = None,
883
+ update_time: int = None,
884
+ ):
885
+ # This parameter is required.
886
+ self.configuration = configuration
887
+ self.create_time = create_time
888
+ self.description = description
889
+ # This parameter is required.
890
+ self.processor_id = processor_id
891
+ # This parameter is required.
892
+ self.processor_name = processor_name
893
+ self.update_time = update_time
894
+
895
+ def validate(self):
896
+ if self.configuration:
897
+ self.configuration.validate()
898
+
899
+ def to_map(self):
900
+ _map = super().to_map()
901
+ if _map is not None:
902
+ return _map
903
+
904
+ result = dict()
905
+ if self.configuration is not None:
906
+ result['configuration'] = self.configuration.to_map()
907
+ if self.create_time is not None:
908
+ result['createTime'] = self.create_time
909
+ if self.description is not None:
910
+ result['description'] = self.description
911
+ if self.processor_id is not None:
912
+ result['processorId'] = self.processor_id
913
+ if self.processor_name is not None:
914
+ result['processorName'] = self.processor_name
915
+ if self.update_time is not None:
916
+ result['updateTime'] = self.update_time
917
+ return result
918
+
919
+ def from_map(self, m: dict = None):
920
+ m = m or dict()
921
+ if m.get('configuration') is not None:
922
+ temp_model = ConsumeProcessorConfiguration()
923
+ self.configuration = temp_model.from_map(m['configuration'])
924
+ if m.get('createTime') is not None:
925
+ self.create_time = m.get('createTime')
926
+ if m.get('description') is not None:
927
+ self.description = m.get('description')
928
+ if m.get('processorId') is not None:
929
+ self.processor_id = m.get('processorId')
930
+ if m.get('processorName') is not None:
931
+ self.processor_name = m.get('processorName')
932
+ if m.get('updateTime') is not None:
933
+ self.update_time = m.get('updateTime')
934
+ return self
935
+
936
+
847
937
  class ConsumerGroup(TeaModel):
848
938
  def __init__(
849
939
  self,
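
The hunk above introduces the ConsumeProcessorConfiguration and ConsumeProcessor models. A minimal usage sketch follows, assuming the models are imported from alibabacloud_sls20201230.models and that the SPL statement and processor identifiers are placeholders:

# Illustrative sketch only; import path, SPL statement, and IDs are assumptions.
from alibabacloud_sls20201230.models import (
    ConsumeProcessor,
    ConsumeProcessorConfiguration,
)

config = ConsumeProcessorConfiguration(spl='* | where level = "ERROR"')
processor = ConsumeProcessor(
    configuration=config,           # required
    processor_id='consume-1',       # required; placeholder ID
    processor_name='error-filter',  # required; placeholder name
)
processor.validate()                # validates the nested configuration
payload = processor.to_map()        # keys: configuration, processorId, processorName
restored = ConsumeProcessor().from_map(payload)
assert restored.configuration.spl == config.spl
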
@@ -1254,6 +1344,102 @@ class Histogram(TeaModel):
1254
1344
  return self
1255
1345
 
1256
1346
 
1347
+ class IngestProcessorConfiguration(TeaModel):
1348
+ def __init__(
1349
+ self,
1350
+ parse_fail: str = None,
1351
+ spl: str = None,
1352
+ ):
1353
+ self.parse_fail = parse_fail
1354
+ # This parameter is required.
1355
+ self.spl = spl
1356
+
1357
+ def validate(self):
1358
+ pass
1359
+
1360
+ def to_map(self):
1361
+ _map = super().to_map()
1362
+ if _map is not None:
1363
+ return _map
1364
+
1365
+ result = dict()
1366
+ if self.parse_fail is not None:
1367
+ result['parseFail'] = self.parse_fail
1368
+ if self.spl is not None:
1369
+ result['spl'] = self.spl
1370
+ return result
1371
+
1372
+ def from_map(self, m: dict = None):
1373
+ m = m or dict()
1374
+ if m.get('parseFail') is not None:
1375
+ self.parse_fail = m.get('parseFail')
1376
+ if m.get('spl') is not None:
1377
+ self.spl = m.get('spl')
1378
+ return self
1379
+
1380
+
1381
+ class IngestProcessor(TeaModel):
1382
+ def __init__(
1383
+ self,
1384
+ configuration: IngestProcessorConfiguration = None,
1385
+ create_time: int = None,
1386
+ description: str = None,
1387
+ processor_id: str = None,
1388
+ processor_name: str = None,
1389
+ update_time: int = None,
1390
+ ):
1391
+ # This parameter is required.
1392
+ self.configuration = configuration
1393
+ self.create_time = create_time
1394
+ self.description = description
1395
+ # This parameter is required.
1396
+ self.processor_id = processor_id
1397
+ # This parameter is required.
1398
+ self.processor_name = processor_name
1399
+ self.update_time = update_time
1400
+
1401
+ def validate(self):
1402
+ if self.configuration:
1403
+ self.configuration.validate()
1404
+
1405
+ def to_map(self):
1406
+ _map = super().to_map()
1407
+ if _map is not None:
1408
+ return _map
1409
+
1410
+ result = dict()
1411
+ if self.configuration is not None:
1412
+ result['configuration'] = self.configuration.to_map()
1413
+ if self.create_time is not None:
1414
+ result['createTime'] = self.create_time
1415
+ if self.description is not None:
1416
+ result['description'] = self.description
1417
+ if self.processor_id is not None:
1418
+ result['processorId'] = self.processor_id
1419
+ if self.processor_name is not None:
1420
+ result['processorName'] = self.processor_name
1421
+ if self.update_time is not None:
1422
+ result['updateTime'] = self.update_time
1423
+ return result
1424
+
1425
+ def from_map(self, m: dict = None):
1426
+ m = m or dict()
1427
+ if m.get('configuration') is not None:
1428
+ temp_model = IngestProcessorConfiguration()
1429
+ self.configuration = temp_model.from_map(m['configuration'])
1430
+ if m.get('createTime') is not None:
1431
+ self.create_time = m.get('createTime')
1432
+ if m.get('description') is not None:
1433
+ self.description = m.get('description')
1434
+ if m.get('processorId') is not None:
1435
+ self.processor_id = m.get('processorId')
1436
+ if m.get('processorName') is not None:
1437
+ self.processor_name = m.get('processorName')
1438
+ if m.get('updateTime') is not None:
1439
+ self.update_time = m.get('updateTime')
1440
+ return self
1441
+
1442
+
1257
1443
  class LogContent(TeaModel):
1258
1444
  def __init__(
1259
1445
  self,
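
The IngestProcessorConfiguration added above differs from the consume variant by an optional parseFail field; a hedged sketch (the SPL statement and parseFail value are assumptions):

# Illustrative sketch only; field values are assumptions.
from alibabacloud_sls20201230.models import IngestProcessor, IngestProcessorConfiguration

ingest_conf = IngestProcessorConfiguration(
    spl='* | parse-json content',  # required transformation statement
    parse_fail='keep',             # optional; hypothetical failure-handling value
)
job = IngestProcessor(
    configuration=ingest_conf,
    processor_id='ingest-1',
    processor_name='json-parser',
)
print(job.to_map()['configuration'])  # {'parseFail': 'keep', 'spl': '* | parse-json content'}
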
@@ -2887,6 +3073,97 @@ class OSSIngestion(TeaModel):
2887
3073
  return self
2888
3074
 
2889
3075
 
3076
+ class ProcessorAssociate(TeaModel):
3077
+ def __init__(
3078
+ self,
3079
+ processor_id: str = None,
3080
+ ):
3081
+ # This parameter is required.
3082
+ self.processor_id = processor_id
3083
+
3084
+ def validate(self):
3085
+ pass
3086
+
3087
+ def to_map(self):
3088
+ _map = super().to_map()
3089
+ if _map is not None:
3090
+ return _map
3091
+
3092
+ result = dict()
3093
+ if self.processor_id is not None:
3094
+ result['processorId'] = self.processor_id
3095
+ return result
3096
+
3097
+ def from_map(self, m: dict = None):
3098
+ m = m or dict()
3099
+ if m.get('processorId') is not None:
3100
+ self.processor_id = m.get('processorId')
3101
+ return self
3102
+
3103
+
3104
+ class ProjectSummary(TeaModel):
3105
+ def __init__(
3106
+ self,
3107
+ create_time: int = None,
3108
+ description: str = None,
3109
+ project_name: str = None,
3110
+ region: str = None,
3111
+ resource_group_id: str = None,
3112
+ update_time: int = None,
3113
+ ):
3114
+ # This parameter is required.
3115
+ self.create_time = create_time
3116
+ # This parameter is required.
3117
+ self.description = description
3118
+ # This parameter is required.
3119
+ self.project_name = project_name
3120
+ # This parameter is required.
3121
+ self.region = region
3122
+ # This parameter is required.
3123
+ self.resource_group_id = resource_group_id
3124
+ # This parameter is required.
3125
+ self.update_time = update_time
3126
+
3127
+ def validate(self):
3128
+ pass
3129
+
3130
+ def to_map(self):
3131
+ _map = super().to_map()
3132
+ if _map is not None:
3133
+ return _map
3134
+
3135
+ result = dict()
3136
+ if self.create_time is not None:
3137
+ result['createTime'] = self.create_time
3138
+ if self.description is not None:
3139
+ result['description'] = self.description
3140
+ if self.project_name is not None:
3141
+ result['projectName'] = self.project_name
3142
+ if self.region is not None:
3143
+ result['region'] = self.region
3144
+ if self.resource_group_id is not None:
3145
+ result['resourceGroupId'] = self.resource_group_id
3146
+ if self.update_time is not None:
3147
+ result['updateTime'] = self.update_time
3148
+ return result
3149
+
3150
+ def from_map(self, m: dict = None):
3151
+ m = m or dict()
3152
+ if m.get('createTime') is not None:
3153
+ self.create_time = m.get('createTime')
3154
+ if m.get('description') is not None:
3155
+ self.description = m.get('description')
3156
+ if m.get('projectName') is not None:
3157
+ self.project_name = m.get('projectName')
3158
+ if m.get('region') is not None:
3159
+ self.region = m.get('region')
3160
+ if m.get('resourceGroupId') is not None:
3161
+ self.resource_group_id = m.get('resourceGroupId')
3162
+ if m.get('updateTime') is not None:
3163
+ self.update_time = m.get('updateTime')
3164
+ return self
3165
+
3166
+
2890
3167
  class SavedSearch(TeaModel):
2891
3168
  def __init__(
2892
3169
  self,
@@ -3743,6 +4020,7 @@ class Logstore(TeaModel):
3743
4020
  logstore_name: str = None,
3744
4021
  max_split_shard: int = None,
3745
4022
  mode: str = None,
4023
+ processor_id: str = None,
3746
4024
  product_type: str = None,
3747
4025
  shard_count: int = None,
3748
4026
  telemetry_type: str = None,
@@ -3760,6 +4038,7 @@ class Logstore(TeaModel):
3760
4038
  self.logstore_name = logstore_name
3761
4039
  self.max_split_shard = max_split_shard
3762
4040
  self.mode = mode
4041
+ self.processor_id = processor_id
3763
4042
  self.product_type = product_type
3764
4043
  # This parameter is required.
3765
4044
  self.shard_count = shard_count
@@ -3799,6 +4078,8 @@ class Logstore(TeaModel):
3799
4078
  result['maxSplitShard'] = self.max_split_shard
3800
4079
  if self.mode is not None:
3801
4080
  result['mode'] = self.mode
4081
+ if self.processor_id is not None:
4082
+ result['processorId'] = self.processor_id
3802
4083
  if self.product_type is not None:
3803
4084
  result['productType'] = self.product_type
3804
4085
  if self.shard_count is not None:
@@ -3834,6 +4115,8 @@ class Logstore(TeaModel):
3834
4115
  self.max_split_shard = m.get('maxSplitShard')
3835
4116
  if m.get('mode') is not None:
3836
4117
  self.mode = m.get('mode')
4118
+ if m.get('processorId') is not None:
4119
+ self.processor_id = m.get('processorId')
3837
4120
  if m.get('productType') is not None:
3838
4121
  self.product_type = m.get('productType')
3839
4122
  if m.get('shardCount') is not None:
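
With the changes above, Logstore round-trips the new processorId field through to_map and from_map; a short sketch, assuming the remaining constructor arguments keep their generated None defaults:

# Hypothetical payload; only a subset of Logstore fields is shown.
from alibabacloud_sls20201230.models import Logstore

raw = {'logstoreName': 'app-log', 'shardCount': 2, 'ttl': 30, 'processorId': 'ingest-1'}
store = Logstore().from_map(raw)
assert store.processor_id == 'ingest-1'
assert store.to_map()['processorId'] == 'ingest-1'
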
@@ -4519,13 +4802,22 @@ class CreateAlertRequest(TeaModel):
4519
4802
  name: str = None,
4520
4803
  schedule: Schedule = None,
4521
4804
  ):
4805
+ # The detailed configuration of the alert rule.
4806
+ #
4522
4807
  # This parameter is required.
4523
4808
  self.configuration = configuration
4809
+ # The description of the alert rule.
4524
4810
  self.description = description
4811
+ # The display name of the alert rule.
4812
+ #
4525
4813
  # This parameter is required.
4526
4814
  self.display_name = display_name
4815
+ # The name of the alert rule. Make sure that the name is unique in a project.
4816
+ #
4527
4817
  # This parameter is required.
4528
4818
  self.name = name
4819
+ # The scheduling configuration of the alert rule.
4820
+ #
4529
4821
  # This parameter is required.
4530
4822
  self.schedule = schedule
4531
4823
 
@@ -5016,25 +5308,34 @@ class CreateDomainResponse(TeaModel):
5016
5308
  return self
5017
5309
 
5018
5310
 
5019
- class CreateETLRequest(TeaModel):
5311
+ class CreateDownloadJobRequestConfigurationSink(TeaModel):
5020
5312
  def __init__(
5021
5313
  self,
5022
- configuration: ETLConfiguration = None,
5023
- description: str = None,
5024
- display_name: str = None,
5025
- name: str = None,
5314
+ bucket: str = None,
5315
+ compression_type: str = None,
5316
+ content_type: str = None,
5317
+ prefix: str = None,
5318
+ role_arn: str = None,
5319
+ type: str = None,
5026
5320
  ):
5321
+ # The OSS bucket.
5322
+ self.bucket = bucket
5323
+ # The compression format.
5324
+ #
5027
5325
  # This parameter is required.
5028
- self.configuration = configuration
5029
- self.description = description
5326
+ self.compression_type = compression_type
5327
+ # The format of the downloaded file.
5328
+ #
5030
5329
  # This parameter is required.
5031
- self.display_name = display_name
5330
+ self.content_type = content_type
5331
+ self.prefix = prefix
5332
+ # The roleArn used for the download.
5333
+ self.role_arn = role_arn
5032
5334
  # This parameter is required.
5033
- self.name = name
5335
+ self.type = type
5034
5336
 
5035
5337
  def validate(self):
5036
- if self.configuration:
5037
- self.configuration.validate()
5338
+ pass
5038
5339
 
5039
5340
  def to_map(self):
5040
5341
  _map = super().to_map()
@@ -5042,13 +5343,242 @@ class CreateETLRequest(TeaModel):
5042
5343
  return _map
5043
5344
 
5044
5345
  result = dict()
5045
- if self.configuration is not None:
5046
- result['configuration'] = self.configuration.to_map()
5047
- if self.description is not None:
5048
- result['description'] = self.description
5049
- if self.display_name is not None:
5050
- result['displayName'] = self.display_name
5051
- if self.name is not None:
5346
+ if self.bucket is not None:
5347
+ result['bucket'] = self.bucket
5348
+ if self.compression_type is not None:
5349
+ result['compressionType'] = self.compression_type
5350
+ if self.content_type is not None:
5351
+ result['contentType'] = self.content_type
5352
+ if self.prefix is not None:
5353
+ result['prefix'] = self.prefix
5354
+ if self.role_arn is not None:
5355
+ result['roleArn'] = self.role_arn
5356
+ if self.type is not None:
5357
+ result['type'] = self.type
5358
+ return result
5359
+
5360
+ def from_map(self, m: dict = None):
5361
+ m = m or dict()
5362
+ if m.get('bucket') is not None:
5363
+ self.bucket = m.get('bucket')
5364
+ if m.get('compressionType') is not None:
5365
+ self.compression_type = m.get('compressionType')
5366
+ if m.get('contentType') is not None:
5367
+ self.content_type = m.get('contentType')
5368
+ if m.get('prefix') is not None:
5369
+ self.prefix = m.get('prefix')
5370
+ if m.get('roleArn') is not None:
5371
+ self.role_arn = m.get('roleArn')
5372
+ if m.get('type') is not None:
5373
+ self.type = m.get('type')
5374
+ return self
5375
+
5376
+
5377
+ class CreateDownloadJobRequestConfiguration(TeaModel):
5378
+ def __init__(
5379
+ self,
5380
+ allow_in_complete: bool = None,
5381
+ from_time: int = None,
5382
+ logstore: str = None,
5383
+ power_sql: bool = None,
5384
+ query: str = None,
5385
+ sink: CreateDownloadJobRequestConfigurationSink = None,
5386
+ to_time: int = None,
5387
+ ):
5388
+ # This parameter is required.
5389
+ self.allow_in_complete = allow_in_complete
5390
+ # The start timestamp. Unit: seconds.
5391
+ #
5392
+ # This parameter is required.
5393
+ self.from_time = from_time
5394
+ # The source logstore.
5395
+ #
5396
+ # This parameter is required.
5397
+ self.logstore = logstore
5398
+ # Specifies whether to enable powerSql.
5399
+ self.power_sql = power_sql
5400
+ # The query statement.
5401
+ #
5402
+ # This parameter is required.
5403
+ self.query = query
5404
+ # The export configuration.
5405
+ #
5406
+ # This parameter is required.
5407
+ self.sink = sink
5408
+ # The end timestamp. Unit: seconds.
5409
+ #
5410
+ # This parameter is required.
5411
+ self.to_time = to_time
5412
+
5413
+ def validate(self):
5414
+ if self.sink:
5415
+ self.sink.validate()
5416
+
5417
+ def to_map(self):
5418
+ _map = super().to_map()
5419
+ if _map is not None:
5420
+ return _map
5421
+
5422
+ result = dict()
5423
+ if self.allow_in_complete is not None:
5424
+ result['allowInComplete'] = self.allow_in_complete
5425
+ if self.from_time is not None:
5426
+ result['fromTime'] = self.from_time
5427
+ if self.logstore is not None:
5428
+ result['logstore'] = self.logstore
5429
+ if self.power_sql is not None:
5430
+ result['powerSql'] = self.power_sql
5431
+ if self.query is not None:
5432
+ result['query'] = self.query
5433
+ if self.sink is not None:
5434
+ result['sink'] = self.sink.to_map()
5435
+ if self.to_time is not None:
5436
+ result['toTime'] = self.to_time
5437
+ return result
5438
+
5439
+ def from_map(self, m: dict = None):
5440
+ m = m or dict()
5441
+ if m.get('allowInComplete') is not None:
5442
+ self.allow_in_complete = m.get('allowInComplete')
5443
+ if m.get('fromTime') is not None:
5444
+ self.from_time = m.get('fromTime')
5445
+ if m.get('logstore') is not None:
5446
+ self.logstore = m.get('logstore')
5447
+ if m.get('powerSql') is not None:
5448
+ self.power_sql = m.get('powerSql')
5449
+ if m.get('query') is not None:
5450
+ self.query = m.get('query')
5451
+ if m.get('sink') is not None:
5452
+ temp_model = CreateDownloadJobRequestConfigurationSink()
5453
+ self.sink = temp_model.from_map(m['sink'])
5454
+ if m.get('toTime') is not None:
5455
+ self.to_time = m.get('toTime')
5456
+ return self
5457
+
5458
+
5459
+ class CreateDownloadJobRequest(TeaModel):
5460
+ def __init__(
5461
+ self,
5462
+ configuration: CreateDownloadJobRequestConfiguration = None,
5463
+ description: str = None,
5464
+ display_name: str = None,
5465
+ name: str = None,
5466
+ ):
5467
+ # The download configuration.
5468
+ #
5469
+ # This parameter is required.
5470
+ self.configuration = configuration
5471
+ # The description of the job.
5472
+ self.description = description
5473
+ # The display name of the job.
5474
+ #
5475
+ # This parameter is required.
5476
+ self.display_name = display_name
5477
+ # The resource attribute field that represents the resource name.
5478
+ #
5479
+ # This parameter is required.
5480
+ self.name = name
5481
+
5482
+ def validate(self):
5483
+ if self.configuration:
5484
+ self.configuration.validate()
5485
+
5486
+ def to_map(self):
5487
+ _map = super().to_map()
5488
+ if _map is not None:
5489
+ return _map
5490
+
5491
+ result = dict()
5492
+ if self.configuration is not None:
5493
+ result['configuration'] = self.configuration.to_map()
5494
+ if self.description is not None:
5495
+ result['description'] = self.description
5496
+ if self.display_name is not None:
5497
+ result['displayName'] = self.display_name
5498
+ if self.name is not None:
5499
+ result['name'] = self.name
5500
+ return result
5501
+
5502
+ def from_map(self, m: dict = None):
5503
+ m = m or dict()
5504
+ if m.get('configuration') is not None:
5505
+ temp_model = CreateDownloadJobRequestConfiguration()
5506
+ self.configuration = temp_model.from_map(m['configuration'])
5507
+ if m.get('description') is not None:
5508
+ self.description = m.get('description')
5509
+ if m.get('displayName') is not None:
5510
+ self.display_name = m.get('displayName')
5511
+ if m.get('name') is not None:
5512
+ self.name = m.get('name')
5513
+ return self
5514
+
5515
+
5516
+ class CreateDownloadJobResponse(TeaModel):
5517
+ def __init__(
5518
+ self,
5519
+ headers: Dict[str, str] = None,
5520
+ status_code: int = None,
5521
+ ):
5522
+ self.headers = headers
5523
+ self.status_code = status_code
5524
+
5525
+ def validate(self):
5526
+ pass
5527
+
5528
+ def to_map(self):
5529
+ _map = super().to_map()
5530
+ if _map is not None:
5531
+ return _map
5532
+
5533
+ result = dict()
5534
+ if self.headers is not None:
5535
+ result['headers'] = self.headers
5536
+ if self.status_code is not None:
5537
+ result['statusCode'] = self.status_code
5538
+ return result
5539
+
5540
+ def from_map(self, m: dict = None):
5541
+ m = m or dict()
5542
+ if m.get('headers') is not None:
5543
+ self.headers = m.get('headers')
5544
+ if m.get('statusCode') is not None:
5545
+ self.status_code = m.get('statusCode')
5546
+ return self
5547
+
5548
+
5549
+ class CreateETLRequest(TeaModel):
5550
+ def __init__(
5551
+ self,
5552
+ configuration: ETLConfiguration = None,
5553
+ description: str = None,
5554
+ display_name: str = None,
5555
+ name: str = None,
5556
+ ):
5557
+ # This parameter is required.
5558
+ self.configuration = configuration
5559
+ self.description = description
5560
+ # This parameter is required.
5561
+ self.display_name = display_name
5562
+ # This parameter is required.
5563
+ self.name = name
5564
+
5565
+ def validate(self):
5566
+ if self.configuration:
5567
+ self.configuration.validate()
5568
+
5569
+ def to_map(self):
5570
+ _map = super().to_map()
5571
+ if _map is not None:
5572
+ return _map
5573
+
5574
+ result = dict()
5575
+ if self.configuration is not None:
5576
+ result['configuration'] = self.configuration.to_map()
5577
+ if self.description is not None:
5578
+ result['description'] = self.description
5579
+ if self.display_name is not None:
5580
+ result['displayName'] = self.display_name
5581
+ if self.name is not None:
5052
5582
  result['name'] = self.name
5053
5583
  return result
5054
5584
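
Taken together, the CreateDownloadJobRequestConfigurationSink, CreateDownloadJobRequestConfiguration, and CreateDownloadJobRequest models added above describe a job that exports query results to OSS. A hedged sketch with placeholder values:

# All values below (bucket, role ARN, sink type, logstore, timestamps) are placeholders.
from alibabacloud_sls20201230.models import (
    CreateDownloadJobRequest,
    CreateDownloadJobRequestConfiguration,
    CreateDownloadJobRequestConfigurationSink,
)

sink = CreateDownloadJobRequestConfigurationSink(
    bucket='example-bucket',
    compression_type='gzip',   # compression format; required
    content_type='csv',        # downloaded file format; required
    prefix='exports/',
    role_arn='acs:ram::123456789012:role/sls-download',
    type='AliyunOSS',          # required; hypothetical sink type value
)
configuration = CreateDownloadJobRequestConfiguration(
    allow_in_complete=False,
    from_time=1716000000,      # start timestamp, in seconds
    to_time=1716003600,        # end timestamp, in seconds
    logstore='app-log',
    query='* | select count(*) as pv',
    power_sql=False,
    sink=sink,
)
request = CreateDownloadJobRequest(
    configuration=configuration,
    name='download-job-1',          # resource name
    display_name='Download PV counts',
    description='hourly export',
)
request.validate()
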
 
@@ -5289,31 +5819,33 @@ class CreateLogStoreRequest(TeaModel):
5289
5819
  logstore_name: str = None,
5290
5820
  max_split_shard: int = None,
5291
5821
  mode: str = None,
5822
+ processor_id: str = None,
5292
5823
  shard_count: int = None,
5293
5824
  telemetry_type: str = None,
5294
5825
  ttl: int = None,
5295
5826
  ):
5296
- # Specifies whether to record public IP addresses. Default value: false. Valid values:
5827
+ # Specifies whether to record public IP addresses. Default value: false.
5297
5828
  #
5298
5829
  # * true
5299
5830
  # * false
5300
5831
  self.append_meta = append_meta
5301
- # Specifies whether to enable automatic sharding. Valid values:
5832
+ # Specifies whether to enable automatic sharding.
5302
5833
  #
5303
5834
  # * true
5304
5835
  # * false
5305
5836
  self.auto_split = auto_split
5306
- # Specifies whether to enable the web tracking feature. Default value: false. Valid values:
5837
+ # Specifies whether to enable the web tracking feature. Default value: false.
5307
5838
  #
5308
5839
  # * true
5309
5840
  # * false
5310
5841
  self.enable_tracking = enable_tracking
5311
- # The data structure of the encryption configuration.
5842
+ # The data structure of the encryption configuration. The following parameters are included: `enable`, `encrypt_type`, and `user_cmk_info`. For more information, see [EncryptConf](https://help.aliyun.com/document_detail/409461.html).
5312
5843
  self.encrypt_conf = encrypt_conf
5313
- # The retention period of data in the hot storage tier of the Logstore. Unit: days. You can specify a value that ranges from 30 to the value of ttl.
5844
+ # The retention period of data in the hot storage tier of the Logstore. Valid values: 7 to 3000. Unit: days.
5314
5845
  #
5315
- # Hot data that is stored for longer than the period specified by hot_ttl is converted to cold data. For more information, see [Enable hot and cold-tiered storage for a Logstore](https://help.aliyun.com/document_detail/308645.html).
5846
+ # After the retention period that is specified for the hot storage tier elapses, the data is moved to the Infrequent Access (IA) storage tier. For more information, see [Enable hot and cold-tiered storage for a Logstore](https://help.aliyun.com/document_detail/308645.html).
5316
5847
  self.hot_ttl = hot_ttl
5848
+ # The retention period of data in the IA storage tier of the Logstore. You must set this parameter to at least 30 days. After the data retention period that you specify for the IA storage tier elapses, the data is moved to the Archive storage tier.
5317
5849
  self.infrequent_access_ttl = infrequent_access_ttl
5318
5850
  # The name of the Logstore. The name must meet the following requirements:
5319
5851
  #
@@ -5324,27 +5856,28 @@ class CreateLogStoreRequest(TeaModel):
5324
5856
  #
5325
5857
  # This parameter is required.
5326
5858
  self.logstore_name = logstore_name
5327
- # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 64.
5859
+ # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 256.
5328
5860
  #
5329
- # > If you set autoSplit to true, you must configure this parameter.
5861
+ # > If you set autoSplit to true, you must specify maxSplitShard.
5330
5862
  self.max_split_shard = max_split_shard
5331
- # The type of the Logstore. Log Service provides the following types of Logstores: Standard Logstores and Query Logstores. Valid values:
5863
+ # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores. Valid values:
5332
5864
  #
5333
5865
  # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
5334
- # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. Log retention periods of weeks or months are considered long.
5866
+ # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
5335
5867
  self.mode = mode
5868
+ self.processor_id = processor_id
5336
5869
  # The number of shards.
5337
5870
  #
5338
- # > You cannot call the CreateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
5871
+ # > You cannot call the CreateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
5339
5872
  #
5340
5873
  # This parameter is required.
5341
5874
  self.shard_count = shard_count
5342
5875
  # The type of the observable data. Valid values:
5343
5876
  #
5344
- # * None: logs
5345
- # * Metrics: metrics
5877
+ # * **None** (default): log data
5878
+ # * **Metrics**: metric data
5346
5879
  self.telemetry_type = telemetry_type
5347
- # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650, data is permanently stored.
5880
+ # The retention period of data. Unit: days. Valid values: 1 to 3000. If you set this parameter to 3650, logs are permanently stored.
5348
5881
  #
5349
5882
  # This parameter is required.
5350
5883
  self.ttl = ttl
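
CreateLogStoreRequest now accepts the same processorId field; a short sketch using only the parameters shown above (the processor ID is a placeholder):

from alibabacloud_sls20201230.models import CreateLogStoreRequest

request = CreateLogStoreRequest(
    logstore_name='app-log',
    shard_count=2,             # required; adjust later via SplitShard or MergeShards
    ttl=30,                    # required retention period, in days
    mode='standard',
    processor_id='ingest-1',   # placeholder ID of an ingest processor
)
print(request.to_map()['processorId'])  # 'ingest-1'
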
@@ -5377,6 +5910,8 @@ class CreateLogStoreRequest(TeaModel):
5377
5910
  result['maxSplitShard'] = self.max_split_shard
5378
5911
  if self.mode is not None:
5379
5912
  result['mode'] = self.mode
5913
+ if self.processor_id is not None:
5914
+ result['processorId'] = self.processor_id
5380
5915
  if self.shard_count is not None:
5381
5916
  result['shardCount'] = self.shard_count
5382
5917
  if self.telemetry_type is not None:
@@ -5406,6 +5941,8 @@ class CreateLogStoreRequest(TeaModel):
5406
5941
  self.max_split_shard = m.get('maxSplitShard')
5407
5942
  if m.get('mode') is not None:
5408
5943
  self.mode = m.get('mode')
5944
+ if m.get('processorId') is not None:
5945
+ self.processor_id = m.get('processorId')
5409
5946
  if m.get('shardCount') is not None:
5410
5947
  self.shard_count = m.get('shardCount')
5411
5948
  if m.get('telemetryType') is not None:
@@ -5828,25 +6365,30 @@ class CreateMachineGroupResponse(TeaModel):
5828
6365
  return self
5829
6366
 
5830
6367
 
5831
- class CreateOSSExportRequest(TeaModel):
6368
+ class CreateMetricStoreRequest(TeaModel):
5832
6369
  def __init__(
5833
6370
  self,
5834
- configuration: OSSExportConfiguration = None,
5835
- description: str = None,
5836
- display_name: str = None,
6371
+ auto_split: bool = None,
6372
+ max_split_shard: int = None,
6373
+ metric_type: str = None,
6374
+ mode: str = None,
5837
6375
  name: str = None,
6376
+ shard_count: int = None,
6377
+ ttl: int = None,
5838
6378
  ):
6379
+ self.auto_split = auto_split
6380
+ self.max_split_shard = max_split_shard
6381
+ self.metric_type = metric_type
6382
+ self.mode = mode
5839
6383
  # This parameter is required.
5840
- self.configuration = configuration
5841
- self.description = description
6384
+ self.name = name
5842
6385
  # This parameter is required.
5843
- self.display_name = display_name
6386
+ self.shard_count = shard_count
5844
6387
  # This parameter is required.
5845
- self.name = name
6388
+ self.ttl = ttl
5846
6389
 
5847
6390
  def validate(self):
5848
- if self.configuration:
5849
- self.configuration.validate()
6391
+ pass
5850
6392
 
5851
6393
  def to_map(self):
5852
6394
  _map = super().to_map()
@@ -5854,17 +6396,118 @@ class CreateOSSExportRequest(TeaModel):
5854
6396
  return _map
5855
6397
 
5856
6398
  result = dict()
5857
- if self.configuration is not None:
5858
- result['configuration'] = self.configuration.to_map()
5859
- if self.description is not None:
5860
- result['description'] = self.description
5861
- if self.display_name is not None:
5862
- result['displayName'] = self.display_name
6399
+ if self.auto_split is not None:
6400
+ result['autoSplit'] = self.auto_split
6401
+ if self.max_split_shard is not None:
6402
+ result['maxSplitShard'] = self.max_split_shard
6403
+ if self.metric_type is not None:
6404
+ result['metricType'] = self.metric_type
6405
+ if self.mode is not None:
6406
+ result['mode'] = self.mode
5863
6407
  if self.name is not None:
5864
6408
  result['name'] = self.name
5865
- return result
5866
-
5867
- def from_map(self, m: dict = None):
6409
+ if self.shard_count is not None:
6410
+ result['shardCount'] = self.shard_count
6411
+ if self.ttl is not None:
6412
+ result['ttl'] = self.ttl
6413
+ return result
6414
+
6415
+ def from_map(self, m: dict = None):
6416
+ m = m or dict()
6417
+ if m.get('autoSplit') is not None:
6418
+ self.auto_split = m.get('autoSplit')
6419
+ if m.get('maxSplitShard') is not None:
6420
+ self.max_split_shard = m.get('maxSplitShard')
6421
+ if m.get('metricType') is not None:
6422
+ self.metric_type = m.get('metricType')
6423
+ if m.get('mode') is not None:
6424
+ self.mode = m.get('mode')
6425
+ if m.get('name') is not None:
6426
+ self.name = m.get('name')
6427
+ if m.get('shardCount') is not None:
6428
+ self.shard_count = m.get('shardCount')
6429
+ if m.get('ttl') is not None:
6430
+ self.ttl = m.get('ttl')
6431
+ return self
6432
+
6433
+
6434
+ class CreateMetricStoreResponse(TeaModel):
6435
+ def __init__(
6436
+ self,
6437
+ headers: Dict[str, str] = None,
6438
+ status_code: int = None,
6439
+ ):
6440
+ self.headers = headers
6441
+ self.status_code = status_code
6442
+
6443
+ def validate(self):
6444
+ pass
6445
+
6446
+ def to_map(self):
6447
+ _map = super().to_map()
6448
+ if _map is not None:
6449
+ return _map
6450
+
6451
+ result = dict()
6452
+ if self.headers is not None:
6453
+ result['headers'] = self.headers
6454
+ if self.status_code is not None:
6455
+ result['statusCode'] = self.status_code
6456
+ return result
6457
+
6458
+ def from_map(self, m: dict = None):
6459
+ m = m or dict()
6460
+ if m.get('headers') is not None:
6461
+ self.headers = m.get('headers')
6462
+ if m.get('statusCode') is not None:
6463
+ self.status_code = m.get('statusCode')
6464
+ return self
6465
+
6466
+
6467
+ class CreateOSSExportRequest(TeaModel):
6468
+ def __init__(
6469
+ self,
6470
+ configuration: OSSExportConfiguration = None,
6471
+ description: str = None,
6472
+ display_name: str = None,
6473
+ name: str = None,
6474
+ ):
6475
+ # The configuration details of the job.
6476
+ #
6477
+ # This parameter is required.
6478
+ self.configuration = configuration
6479
+ # The description of the job.
6480
+ self.description = description
6481
+ # The display name of the job.
6482
+ #
6483
+ # This parameter is required.
6484
+ self.display_name = display_name
6485
+ # The unique identifier of the OSS data shipping job.
6486
+ #
6487
+ # This parameter is required.
6488
+ self.name = name
6489
+
6490
+ def validate(self):
6491
+ if self.configuration:
6492
+ self.configuration.validate()
6493
+
6494
+ def to_map(self):
6495
+ _map = super().to_map()
6496
+ if _map is not None:
6497
+ return _map
6498
+
6499
+ result = dict()
6500
+ if self.configuration is not None:
6501
+ result['configuration'] = self.configuration.to_map()
6502
+ if self.description is not None:
6503
+ result['description'] = self.description
6504
+ if self.display_name is not None:
6505
+ result['displayName'] = self.display_name
6506
+ if self.name is not None:
6507
+ result['name'] = self.name
6508
+ return result
6509
+
6510
+ def from_map(self, m: dict = None):
5868
6511
  m = m or dict()
5869
6512
  if m.get('configuration') is not None:
5870
6513
  temp_model = OSSExportConfiguration()
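
The new CreateMetricStoreRequest introduced above mirrors the Logstore creation parameters; a minimal sketch (the metricType and mode values are assumptions):

from alibabacloud_sls20201230.models import CreateMetricStoreRequest

metric_request = CreateMetricStoreRequest(
    name='app-metrics',            # required Metricstore name
    shard_count=2,                 # required
    ttl=30,                        # required retention period, in days
    auto_split=True,
    max_split_shard=64,
    metric_type='prometheus',      # hypothetical metric type
    mode='standard',               # hypothetical mode
)
metric_request.validate()
print(metric_request.to_map())
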
@@ -5919,11 +6562,18 @@ class CreateOSSHDFSExportRequest(TeaModel):
5919
6562
  display_name: str = None,
5920
6563
  name: str = None,
5921
6564
  ):
6565
+ # The configuration details of the job.
6566
+ #
5922
6567
  # This parameter is required.
5923
6568
  self.configuration = configuration
6569
+ # The description of the job.
5924
6570
  self.description = description
6571
+ # The display name of the job.
6572
+ #
5925
6573
  # This parameter is required.
5926
6574
  self.display_name = display_name
6575
+ # The unique identifier of the OSS data shipping job.
6576
+ #
5927
6577
  # This parameter is required.
5928
6578
  self.name = name
5929
6579
 
@@ -6096,7 +6746,7 @@ class CreateOssExternalStoreRequestParameterColumns(TeaModel):
6096
6746
  #
6097
6747
  # This parameter is required.
6098
6748
  self.name = name
6099
- # The type of the field.
6749
+ # The data type of the field.
6100
6750
  #
6101
6751
  # This parameter is required.
6102
6752
  self.type = type
@@ -6135,11 +6785,11 @@ class CreateOssExternalStoreRequestParameter(TeaModel):
6135
6785
  endpoint: str = None,
6136
6786
  objects: List[str] = None,
6137
6787
  ):
6138
- # The AccessKey ID of your account.
6788
+ # The AccessKey ID.
6139
6789
  #
6140
6790
  # This parameter is required.
6141
6791
  self.accessid = accessid
6142
- # The AccessKey secret of your account.
6792
+ # The AccessKey secret.
6143
6793
  #
6144
6794
  # This parameter is required.
6145
6795
  self.accesskey = accesskey
@@ -6147,15 +6797,15 @@ class CreateOssExternalStoreRequestParameter(TeaModel):
6147
6797
  #
6148
6798
  # This parameter is required.
6149
6799
  self.bucket = bucket
6150
- # The fields that are associated to the external store.
6800
+ # The associated fields.
6151
6801
  #
6152
6802
  # This parameter is required.
6153
6803
  self.columns = columns
6154
- # The Object Storage Service (OSS) endpoint.
6804
+ # The OSS endpoint. For more information, see [Regions and endpoints](https://help.aliyun.com/document_detail/31837.html).
6155
6805
  #
6156
6806
  # This parameter is required.
6157
6807
  self.endpoint = endpoint
6158
- # The names of the OSS objects that are associated to the external store.
6808
+ # The associated OSS objects. Valid values of n: 1 to 100.
6159
6809
  #
6160
6810
  # This parameter is required.
6161
6811
  self.objects = objects
@@ -6219,7 +6869,7 @@ class CreateOssExternalStoreRequest(TeaModel):
6219
6869
  #
6220
6870
  # This parameter is required.
6221
6871
  self.external_store_name = external_store_name
6222
- # The parameters that are configured for the external store.
6872
+ # The parameters of the external store.
6223
6873
  #
6224
6874
  # This parameter is required.
6225
6875
  self.parameter = parameter
@@ -6757,8 +7407,12 @@ class CreateSqlInstanceRequest(TeaModel):
6757
7407
  cu: int = None,
6758
7408
  use_as_default: bool = None,
6759
7409
  ):
7410
+ # The number of compute units (CUs). When you use the Dedicated SQL feature, CUs are used in parallel.
7411
+ #
6760
7412
  # This parameter is required.
6761
7413
  self.cu = cu
7414
+ # Specifies whether to enable the Dedicated SQL feature for the project. If you set this parameter to true, the Dedicated SQL feature is enabled for the specified project and takes effect for all query statements that you execute in the project, including the query statements for alerts and dashboards.
7415
+ #
6762
7416
  # This parameter is required.
6763
7417
  self.use_as_default = use_as_default
6764
7418
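
Per the comments added above, the Dedicated SQL request takes a CU count and a project-level default switch; a one-line sketch with a placeholder CU value:

from alibabacloud_sls20201230.models import CreateSqlInstanceRequest

sql_request = CreateSqlInstanceRequest(cu=100, use_as_default=True)  # 100 CUs is a placeholder
print(sql_request.to_map())
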
 
@@ -6826,10 +7480,20 @@ class CreateStoreViewRequest(TeaModel):
6826
7480
  store_type: str = None,
6827
7481
  stores: List[StoreViewStore] = None,
6828
7482
  ):
7483
+ # The name of the dataset.
7484
+ #
7485
+ # * The name can contain lowercase letters, digits, and underscores (_).
7486
+ # * The name must start with a lowercase letter.
7487
+ # * The name must be 3 to 62 characters in length.
7488
+ #
6829
7489
  # This parameter is required.
6830
7490
  self.name = name
7491
+ # The type of the dataset. Valid values: metricstore and logstore.
7492
+ #
6831
7493
  # This parameter is required.
6832
7494
  self.store_type = store_type
7495
+ # The Logstores or Metricstores.
7496
+ #
6833
7497
  # This parameter is required.
6834
7498
  self.stores = stores
6835
7499
 
@@ -7333,6 +7997,39 @@ class DeleteDomainResponse(TeaModel):
7333
7997
  return self
7334
7998
 
7335
7999
 
8000
+ class DeleteDownloadJobResponse(TeaModel):
8001
+ def __init__(
8002
+ self,
8003
+ headers: Dict[str, str] = None,
8004
+ status_code: int = None,
8005
+ ):
8006
+ self.headers = headers
8007
+ self.status_code = status_code
8008
+
8009
+ def validate(self):
8010
+ pass
8011
+
8012
+ def to_map(self):
8013
+ _map = super().to_map()
8014
+ if _map is not None:
8015
+ return _map
8016
+
8017
+ result = dict()
8018
+ if self.headers is not None:
8019
+ result['headers'] = self.headers
8020
+ if self.status_code is not None:
8021
+ result['statusCode'] = self.status_code
8022
+ return result
8023
+
8024
+ def from_map(self, m: dict = None):
8025
+ m = m or dict()
8026
+ if m.get('headers') is not None:
8027
+ self.headers = m.get('headers')
8028
+ if m.get('statusCode') is not None:
8029
+ self.status_code = m.get('statusCode')
8030
+ return self
8031
+
8032
+
7336
8033
  class DeleteETLResponse(TeaModel):
7337
8034
  def __init__(
7338
8035
  self,
@@ -7564,6 +8261,39 @@ class DeleteMachineGroupResponse(TeaModel):
7564
8261
  return self
7565
8262
 
7566
8263
 
8264
+ class DeleteMetricStoreResponse(TeaModel):
8265
+ def __init__(
8266
+ self,
8267
+ headers: Dict[str, str] = None,
8268
+ status_code: int = None,
8269
+ ):
8270
+ self.headers = headers
8271
+ self.status_code = status_code
8272
+
8273
+ def validate(self):
8274
+ pass
8275
+
8276
+ def to_map(self):
8277
+ _map = super().to_map()
8278
+ if _map is not None:
8279
+ return _map
8280
+
8281
+ result = dict()
8282
+ if self.headers is not None:
8283
+ result['headers'] = self.headers
8284
+ if self.status_code is not None:
8285
+ result['statusCode'] = self.status_code
8286
+ return result
8287
+
8288
+ def from_map(self, m: dict = None):
8289
+ m = m or dict()
8290
+ if m.get('headers') is not None:
8291
+ self.headers = m.get('headers')
8292
+ if m.get('statusCode') is not None:
8293
+ self.status_code = m.get('statusCode')
8294
+ return self
8295
+
8296
+
7567
8297
  class DeleteOSSExportResponse(TeaModel):
7568
8298
  def __init__(
7569
8299
  self,
@@ -8810,11 +9540,11 @@ class GetContextLogsRequest(TeaModel):
8810
9540
  pack_meta: str = None,
8811
9541
  type: str = None,
8812
9542
  ):
8813
- # The number of logs that you want to obtain and are generated before the generation time of the start log. Valid values: (0,100].
9543
+ # The number of logs that you want to obtain and are generated before the generation time of the start log. Valid values: `(0,100]`.
8814
9544
  #
8815
9545
  # This parameter is required.
8816
9546
  self.back_lines = back_lines
8817
- # The number of logs that you want to obtain and are generated after the generation time of the start log. Valid values: (0,100].
9547
+ # The number of logs that you want to obtain and are generated after the generation time of the start log. Valid values: `(0,100]`.
8818
9548
  #
8819
9549
  # This parameter is required.
8820
9550
  self.forward_lines = forward_lines
@@ -9206,20 +9936,29 @@ class GetDashboardResponse(TeaModel):
9206
9936
  return self
9207
9937
 
9208
9938
 
9209
- class GetETLResponse(TeaModel):
9939
+ class GetDownloadJobResponseBodyConfigurationSink(TeaModel):
9210
9940
  def __init__(
9211
9941
  self,
9212
- headers: Dict[str, str] = None,
9213
- status_code: int = None,
9214
- body: ETL = None,
9942
+ bucket: str = None,
9943
+ compression_type: str = None,
9944
+ content_type: str = None,
9945
+ prefix: str = None,
9946
+ role_arn: str = None,
9947
+ type: str = None,
9215
9948
  ):
9216
- self.headers = headers
9217
- self.status_code = status_code
9218
- self.body = body
9949
+ # The OSS bucket.
9950
+ self.bucket = bucket
9951
+ # The compression format.
9952
+ self.compression_type = compression_type
9953
+ # The format of the downloaded file.
9954
+ self.content_type = content_type
9955
+ self.prefix = prefix
9956
+ # The roleArn used for the download.
9957
+ self.role_arn = role_arn
9958
+ self.type = type
9219
9959
 
9220
9960
  def validate(self):
9221
- if self.body:
9222
- self.body.validate()
9961
+ pass
9223
9962
 
9224
9963
  def to_map(self):
9225
9964
  _map = super().to_map()
@@ -9227,40 +9966,348 @@ class GetETLResponse(TeaModel):
9227
9966
  return _map
9228
9967
 
9229
9968
  result = dict()
9230
- if self.headers is not None:
9231
- result['headers'] = self.headers
9232
- if self.status_code is not None:
9233
- result['statusCode'] = self.status_code
9234
- if self.body is not None:
9235
- result['body'] = self.body.to_map()
9969
+ if self.bucket is not None:
9970
+ result['bucket'] = self.bucket
9971
+ if self.compression_type is not None:
9972
+ result['compressionType'] = self.compression_type
9973
+ if self.content_type is not None:
9974
+ result['contentType'] = self.content_type
9975
+ if self.prefix is not None:
9976
+ result['prefix'] = self.prefix
9977
+ if self.role_arn is not None:
9978
+ result['roleArn'] = self.role_arn
9979
+ if self.type is not None:
9980
+ result['type'] = self.type
9236
9981
  return result
9237
9982
 
9238
9983
  def from_map(self, m: dict = None):
9239
9984
  m = m or dict()
9240
- if m.get('headers') is not None:
9241
- self.headers = m.get('headers')
9242
- if m.get('statusCode') is not None:
9243
- self.status_code = m.get('statusCode')
9244
- if m.get('body') is not None:
9245
- temp_model = ETL()
9246
- self.body = temp_model.from_map(m['body'])
9985
+ if m.get('bucket') is not None:
9986
+ self.bucket = m.get('bucket')
9987
+ if m.get('compressionType') is not None:
9988
+ self.compression_type = m.get('compressionType')
9989
+ if m.get('contentType') is not None:
9990
+ self.content_type = m.get('contentType')
9991
+ if m.get('prefix') is not None:
9992
+ self.prefix = m.get('prefix')
9993
+ if m.get('roleArn') is not None:
9994
+ self.role_arn = m.get('roleArn')
9995
+ if m.get('type') is not None:
9996
+ self.type = m.get('type')
9247
9997
  return self
9248
9998
 
9249
9999
 
9250
- class GetExternalStoreResponse(TeaModel):
10000
+ class GetDownloadJobResponseBodyConfiguration(TeaModel):
9251
10001
  def __init__(
9252
10002
  self,
9253
- headers: Dict[str, str] = None,
9254
- status_code: int = None,
9255
- body: ExternalStore = None,
10003
+ allow_in_complete: bool = None,
10004
+ from_time: int = None,
10005
+ logstore: str = None,
10006
+ power_sql: bool = None,
10007
+ query: str = None,
10008
+ sink: GetDownloadJobResponseBodyConfigurationSink = None,
10009
+ to_time: int = None,
9256
10010
  ):
9257
- self.headers = headers
9258
- self.status_code = status_code
9259
- self.body = body
10011
+ self.allow_in_complete = allow_in_complete
10012
+ # 起点时间戳(精确到秒)
10013
+ self.from_time = from_time
10014
+ # 源logstore
10015
+ self.logstore = logstore
10016
+ # 是否启用powerSql
10017
+ self.power_sql = power_sql
10018
+ # 查询语句
10019
+ self.query = query
10020
+ # 导出配置
10021
+ self.sink = sink
10022
+ # 结束时间戳(精确到秒)
10023
+ self.to_time = to_time
9260
10024
 
9261
10025
  def validate(self):
9262
- if self.body:
9263
- self.body.validate()
10026
+ if self.sink:
10027
+ self.sink.validate()
10028
+
10029
+ def to_map(self):
10030
+ _map = super().to_map()
10031
+ if _map is not None:
10032
+ return _map
10033
+
10034
+ result = dict()
10035
+ if self.allow_in_complete is not None:
10036
+ result['allowInComplete'] = self.allow_in_complete
10037
+ if self.from_time is not None:
10038
+ result['fromTime'] = self.from_time
10039
+ if self.logstore is not None:
10040
+ result['logstore'] = self.logstore
10041
+ if self.power_sql is not None:
10042
+ result['powerSql'] = self.power_sql
10043
+ if self.query is not None:
10044
+ result['query'] = self.query
10045
+ if self.sink is not None:
10046
+ result['sink'] = self.sink.to_map()
10047
+ if self.to_time is not None:
10048
+ result['toTime'] = self.to_time
10049
+ return result
10050
+
10051
+ def from_map(self, m: dict = None):
10052
+ m = m or dict()
10053
+ if m.get('allowInComplete') is not None:
10054
+ self.allow_in_complete = m.get('allowInComplete')
10055
+ if m.get('fromTime') is not None:
10056
+ self.from_time = m.get('fromTime')
10057
+ if m.get('logstore') is not None:
10058
+ self.logstore = m.get('logstore')
10059
+ if m.get('powerSql') is not None:
10060
+ self.power_sql = m.get('powerSql')
10061
+ if m.get('query') is not None:
10062
+ self.query = m.get('query')
10063
+ if m.get('sink') is not None:
10064
+ temp_model = GetDownloadJobResponseBodyConfigurationSink()
10065
+ self.sink = temp_model.from_map(m['sink'])
10066
+ if m.get('toTime') is not None:
10067
+ self.to_time = m.get('toTime')
10068
+ return self
10069
+
10070
+
10071
+ class GetDownloadJobResponseBodyExecutionDetails(TeaModel):
10072
+ def __init__(
10073
+ self,
10074
+ check_sum: str = None,
10075
+ error_message: str = None,
10076
+ execute_time: int = None,
10077
+ file_path: str = None,
10078
+ file_size: int = None,
10079
+ log_count: int = None,
10080
+ progress: int = None,
10081
+ ):
10082
+ self.check_sum = check_sum
10083
+ # 下载错误信息
10084
+ self.error_message = error_message
10085
+ # 下载执行时间
10086
+ self.execute_time = execute_time
10087
+ # 下载结果链接
10088
+ self.file_path = file_path
10089
+ # 下载文件大小
10090
+ self.file_size = file_size
10091
+ # 下载日志条数
10092
+ self.log_count = log_count
10093
+ # 下载进度
10094
+ self.progress = progress
10095
+
10096
+ def validate(self):
10097
+ pass
10098
+
10099
+ def to_map(self):
10100
+ _map = super().to_map()
10101
+ if _map is not None:
10102
+ return _map
10103
+
10104
+ result = dict()
10105
+ if self.check_sum is not None:
10106
+ result['checkSum'] = self.check_sum
10107
+ if self.error_message is not None:
10108
+ result['errorMessage'] = self.error_message
10109
+ if self.execute_time is not None:
10110
+ result['executeTime'] = self.execute_time
10111
+ if self.file_path is not None:
10112
+ result['filePath'] = self.file_path
10113
+ if self.file_size is not None:
10114
+ result['fileSize'] = self.file_size
10115
+ if self.log_count is not None:
10116
+ result['logCount'] = self.log_count
10117
+ if self.progress is not None:
10118
+ result['progress'] = self.progress
10119
+ return result
10120
+
10121
+ def from_map(self, m: dict = None):
10122
+ m = m or dict()
10123
+ if m.get('checkSum') is not None:
10124
+ self.check_sum = m.get('checkSum')
10125
+ if m.get('errorMessage') is not None:
10126
+ self.error_message = m.get('errorMessage')
10127
+ if m.get('executeTime') is not None:
10128
+ self.execute_time = m.get('executeTime')
10129
+ if m.get('filePath') is not None:
10130
+ self.file_path = m.get('filePath')
10131
+ if m.get('fileSize') is not None:
10132
+ self.file_size = m.get('fileSize')
10133
+ if m.get('logCount') is not None:
10134
+ self.log_count = m.get('logCount')
10135
+ if m.get('progress') is not None:
10136
+ self.progress = m.get('progress')
10137
+ return self
10138
+
10139
+
10140
+ class GetDownloadJobResponseBody(TeaModel):
10141
+ def __init__(
10142
+ self,
10143
+ configuration: GetDownloadJobResponseBodyConfiguration = None,
10144
+ create_time: str = None,
10145
+ description: str = None,
10146
+ display_name: str = None,
10147
+ execution_details: GetDownloadJobResponseBodyExecutionDetails = None,
10148
+ name: str = None,
10149
+ status: str = None,
10150
+ ):
10151
+ # 下载配置
10152
+ self.configuration = configuration
10153
+ # 代表创建时间的资源属性字段
10154
+ self.create_time = create_time
10155
+ # 任务描述
10156
+ self.description = description
10157
+ # 任务显示名称
10158
+ self.display_name = display_name
10159
+ # 任务执行细节
10160
+ self.execution_details = execution_details
10161
+ # 代表资源名称的资源属性字段
10162
+ self.name = name
10163
+ # 代表资源状态的资源属性字段
10164
+ self.status = status
10165
+
10166
+ def validate(self):
10167
+ if self.configuration:
10168
+ self.configuration.validate()
10169
+ if self.execution_details:
10170
+ self.execution_details.validate()
10171
+
10172
+ def to_map(self):
10173
+ _map = super().to_map()
10174
+ if _map is not None:
10175
+ return _map
10176
+
10177
+ result = dict()
10178
+ if self.configuration is not None:
10179
+ result['configuration'] = self.configuration.to_map()
10180
+ if self.create_time is not None:
10181
+ result['createTime'] = self.create_time
10182
+ if self.description is not None:
10183
+ result['description'] = self.description
10184
+ if self.display_name is not None:
10185
+ result['displayName'] = self.display_name
10186
+ if self.execution_details is not None:
10187
+ result['executionDetails'] = self.execution_details.to_map()
10188
+ if self.name is not None:
10189
+ result['name'] = self.name
10190
+ if self.status is not None:
10191
+ result['status'] = self.status
10192
+ return result
10193
+
10194
+ def from_map(self, m: dict = None):
10195
+ m = m or dict()
10196
+ if m.get('configuration') is not None:
10197
+ temp_model = GetDownloadJobResponseBodyConfiguration()
10198
+ self.configuration = temp_model.from_map(m['configuration'])
10199
+ if m.get('createTime') is not None:
10200
+ self.create_time = m.get('createTime')
10201
+ if m.get('description') is not None:
10202
+ self.description = m.get('description')
10203
+ if m.get('displayName') is not None:
10204
+ self.display_name = m.get('displayName')
10205
+ if m.get('executionDetails') is not None:
10206
+ temp_model = GetDownloadJobResponseBodyExecutionDetails()
10207
+ self.execution_details = temp_model.from_map(m['executionDetails'])
10208
+ if m.get('name') is not None:
10209
+ self.name = m.get('name')
10210
+ if m.get('status') is not None:
10211
+ self.status = m.get('status')
10212
+ return self
10213
+
10214
+
10215
+ class GetDownloadJobResponse(TeaModel):
10216
+ def __init__(
10217
+ self,
10218
+ headers: Dict[str, str] = None,
10219
+ status_code: int = None,
10220
+ body: GetDownloadJobResponseBody = None,
10221
+ ):
10222
+ self.headers = headers
10223
+ self.status_code = status_code
10224
+ self.body = body
10225
+
10226
+ def validate(self):
10227
+ if self.body:
10228
+ self.body.validate()
10229
+
10230
+ def to_map(self):
10231
+ _map = super().to_map()
10232
+ if _map is not None:
10233
+ return _map
10234
+
10235
+ result = dict()
10236
+ if self.headers is not None:
10237
+ result['headers'] = self.headers
10238
+ if self.status_code is not None:
10239
+ result['statusCode'] = self.status_code
10240
+ if self.body is not None:
10241
+ result['body'] = self.body.to_map()
10242
+ return result
10243
+
10244
+ def from_map(self, m: dict = None):
10245
+ m = m or dict()
10246
+ if m.get('headers') is not None:
10247
+ self.headers = m.get('headers')
10248
+ if m.get('statusCode') is not None:
10249
+ self.status_code = m.get('statusCode')
10250
+ if m.get('body') is not None:
10251
+ temp_model = GetDownloadJobResponseBody()
10252
+ self.body = temp_model.from_map(m['body'])
10253
+ return self
10254
+
10255
+
10256
+ class GetETLResponse(TeaModel):
10257
+ def __init__(
10258
+ self,
10259
+ headers: Dict[str, str] = None,
10260
+ status_code: int = None,
10261
+ body: ETL = None,
10262
+ ):
10263
+ self.headers = headers
10264
+ self.status_code = status_code
10265
+ self.body = body
10266
+
10267
+ def validate(self):
10268
+ if self.body:
10269
+ self.body.validate()
10270
+
10271
+ def to_map(self):
10272
+ _map = super().to_map()
10273
+ if _map is not None:
10274
+ return _map
10275
+
10276
+ result = dict()
10277
+ if self.headers is not None:
10278
+ result['headers'] = self.headers
10279
+ if self.status_code is not None:
10280
+ result['statusCode'] = self.status_code
10281
+ if self.body is not None:
10282
+ result['body'] = self.body.to_map()
10283
+ return result
10284
+
10285
+ def from_map(self, m: dict = None):
10286
+ m = m or dict()
10287
+ if m.get('headers') is not None:
10288
+ self.headers = m.get('headers')
10289
+ if m.get('statusCode') is not None:
10290
+ self.status_code = m.get('statusCode')
10291
+ if m.get('body') is not None:
10292
+ temp_model = ETL()
10293
+ self.body = temp_model.from_map(m['body'])
10294
+ return self
10295
+
10296
+
10297
+ class GetExternalStoreResponse(TeaModel):
10298
+ def __init__(
10299
+ self,
10300
+ headers: Dict[str, str] = None,
10301
+ status_code: int = None,
10302
+ body: ExternalStore = None,
10303
+ ):
10304
+ self.headers = headers
10305
+ self.status_code = status_code
10306
+ self.body = body
10307
+
10308
+ def validate(self):
10309
+ if self.body:
10310
+ self.body.validate()
9264
10311
 
9265
10312
  def to_map(self):
9266
10313
  _map = super().to_map()
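
To round out the download-job additions, a hedged sketch of unpacking a GetDownloadJob response body (the payload is invented and shaped after the from_map keys above; the status value is hypothetical):

# Invented payload for illustration only.
from alibabacloud_sls20201230.models import GetDownloadJobResponseBody

raw_body = {
    'name': 'download-job-1',
    'status': 'SUCCEEDED',
    'createTime': '1716000000',
    'executionDetails': {
        'progress': 100,        # download progress
        'logCount': 12345,      # number of downloaded log entries
        'filePath': 'https://example-bucket.oss-cn-hangzhou.aliyuncs.com/exports/part-0.gz',
        'fileSize': 1048576,    # bytes
        'checkSum': 'abc123',
    },
}
body = GetDownloadJobResponseBody().from_map(raw_body)
print(body.status, body.execution_details.progress, body.execution_details.file_path)
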
@@ -9696,6 +10743,7 @@ class GetLogStoreMeteringModeResponseBody(TeaModel):
9696
10743
  self,
9697
10744
  metering_mode: str = None,
9698
10745
  ):
10746
+ # The billing mode. Default value: ChargeByFunction. Valid values: ChargeByFunction and ChargeByDataIngest.
9699
10747
  self.metering_mode = metering_mode
9700
10748
 
9701
10749
  def validate(self):
@@ -10006,28 +11054,29 @@ class GetLogsV2Request(TeaModel):
10006
11054
  to: int = None,
10007
11055
  topic: str = None,
10008
11056
  ):
10009
- # Specifies whether to page forward or backward for the scan-based query or the phrase search.
11057
+ # Specifies whether to page forward or backward for the scan-based query or phrase search.
10010
11058
  self.forward = forward
10011
11059
  # The beginning of the time range to query. The value is the log time that is specified when log data is written.
10012
11060
  #
10013
- # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a UNIX timestamp representing the number of seconds that have elapsed since January 1, 1970, 00:00:00 UTC.
11061
+ # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a timestamp that follows the UNIX time format. It is the number of seconds that have elapsed since January 1, 1970, 00:00:00 UTC.
10014
11062
  #
10015
11063
  # This parameter is required.
10016
11064
  self.from_ = from_
11065
+ # Specifies whether to highlight the returned result.
10017
11066
  self.highlight = highlight
10018
- # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement. Minimum value: 0. Maximum value: 100. Default value: 100.
11067
+ # The maximum number of logs to return for the request. This parameter takes effect only when the query parameter is set to a search statement. Valid values: 0 to 100. Default value: 100.
10019
11068
  self.line = line
10020
11069
  # The line from which the query starts. This parameter takes effect only when the query parameter is set to a search statement. Default value: 0.
10021
11070
  self.offset = offset
10022
11071
  # Specifies whether to enable the SQL enhancement feature. By default, the feature is disabled.
10023
11072
  self.power_sql = power_sql
10024
- # The search statement or the query statement. For more information, see the "Log search overview" and "Log analysis overview" topics.
11073
+ # The search statement or query statement. For more information, see the "Log search overview" and "Log analysis overview" topics.
10025
11074
  #
10026
- # If you add set session parallel_sql=true; to the analytic statement in the query parameter, Dedicated SQL is used. For example, you can set the query parameter to \\* | set session parallel_sql=true; select count(\\*) as pv.
11075
+ # If you add set session parallel_sql=true; to the analytic statement in the query parameter, Dedicated SQL is used. Example: \\* | set session parallel_sql=true; select count(\\*) as pv.
10027
11076
  #
10028
- # Note: If you specify an analytic statement in the query parameter, the line and offset parameters do not take effect in this operation. In this case, we recommend that you set the line and offset parameters to 0 and use the LIMIT clause to limit the number of logs to return on each page. For more information, see the "Perform paged queries" topic.
11077
+ # Note: If you specify an analytic statement in the query parameter, the line and offset parameters do not take effect in this operation. In this case, we recommend that you set the line and offset parameters to 0 and use the LIMIT clause to specify the number of logs to return on each page. For more information, see the "Perform paged queries" topic.
10029
11078
  self.query = query
10030
- # Specifies whether to return logs in reverse chronological order of log timestamps. The log timestamps are accurate to the minute. Valid values:
11079
+ # Specifies whether to return logs in reverse chronological order of log timestamps. The log timestamps are accurate to minutes. Valid values:
10031
11080
  #
10032
11081
  # true: Logs are returned in reverse chronological order of log timestamps. false (default): Logs are returned in chronological order of log timestamps. Note: The reverse parameter takes effect only when the query parameter is set to a search statement. The reverse parameter specifies the method used to sort returned logs. If the query parameter is set to a query statement, the reverse parameter does not take effect. The method used to sort returned logs is specified by the ORDER BY clause in the analytic statement. If you use the keyword asc in the ORDER BY clause, the logs are sorted in chronological order. If you use the keyword desc in the ORDER BY clause, the logs are sorted in reverse chronological order. By default, asc is used in the ORDER BY clause.
10033
11082
  self.reverse = reverse
@@ -10035,7 +11084,7 @@ class GetLogsV2Request(TeaModel):
10035
11084
  self.session = session
10036
11085
  # The end of the time range to query. The value is the log time that is specified when log data is written.
10037
11086
  #
10038
- # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a UNIX timestamp representing the number of seconds that have elapsed since January 1, 1970, 00:00:00 UTC.
11087
+ # The time range that is specified in this operation is a left-closed, right-open interval. The interval includes the start time specified by the from parameter, but does not include the end time specified by the to parameter. If you specify the same value for the from and to parameters, the interval is invalid, and an error message is returned. The value is a timestamp that follows the UNIX time format. It is the number of seconds that have elapsed since January 1, 1970, 00:00:00 UTC.
10039
11088
  #
10040
11089
  # This parameter is required.
10041
11090
  self.to = to
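A minimal sketch of how the from/to window and the line/offset rules documented above fit together; the module path and the query value are placeholders, not part of this diff:

    import time
    from alibabacloud_sls20201230.models import GetLogsV2Request

    now = int(time.time())
    request = GetLogsV2Request(
        from_=now - 900,  # left-closed: logs written at from_ are included
        to=now,           # right-open: logs written at to are excluded
        query='* | set session parallel_sql=true; select count(*) as pv',
        line=0,           # ignored for analytic statements; page with a LIMIT clause instead
        offset=0,
        power_sql=False,
        reverse=False,
    )
    print(request.to_map())  # the payload serialized for GetLogsV2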
@@ -10183,7 +11232,7 @@ class GetLogsV2ResponseBodyMeta(TeaModel):
10183
11232
  # Indicates whether the query is an SQL query.
10184
11233
  self.has_sql = has_sql
10185
11234
  self.highlights = highlights
10186
- # Indicates whether the returned result is accurate.
11235
+ # Indicates whether the returned result is accurate to seconds.
10187
11236
  self.is_accurate = is_accurate
10188
11237
  # All keys in the query result.
10189
11238
  self.keys = keys
@@ -10329,7 +11378,7 @@ class GetLogsV2ResponseBody(TeaModel):
10329
11378
  ):
10330
11379
  # The returned result.
10331
11380
  self.data = data
10332
- # The metadata that is returned.
11381
+ # The metadata of the returned data.
10333
11382
  self.meta = meta
10334
11383
 
10335
11384
  def validate(self):
@@ -10596,12 +11645,28 @@ class GetMachineGroupResponse(TeaModel):
10596
11645
  return self
10597
11646
 
10598
11647
 
10599
- class GetMetricStoreMeteringModeResponseBody(TeaModel):
11648
+ class GetMetricStoreResponseBody(TeaModel):
10600
11649
  def __init__(
10601
11650
  self,
10602
- metering_mode: str = None,
11651
+ auto_split: bool = None,
11652
+ create_time: int = None,
11653
+ last_modify_time: int = None,
11654
+ max_split_shard: int = None,
11655
+ metric_type: str = None,
11656
+ mode: str = None,
11657
+ name: str = None,
11658
+ shard_count: int = None,
11659
+ ttl: int = None,
10603
11660
  ):
10604
- self.metering_mode = metering_mode
11661
+ self.auto_split = auto_split
11662
+ self.create_time = create_time
11663
+ self.last_modify_time = last_modify_time
11664
+ self.max_split_shard = max_split_shard
11665
+ self.metric_type = metric_type
11666
+ self.mode = mode
11667
+ self.name = name
11668
+ self.shard_count = shard_count
11669
+ self.ttl = ttl
10605
11670
 
10606
11671
  def validate(self):
10607
11672
  pass
@@ -10612,17 +11677,118 @@ class GetMetricStoreMeteringModeResponseBody(TeaModel):
10612
11677
  return _map
10613
11678
 
10614
11679
  result = dict()
10615
- if self.metering_mode is not None:
10616
- result['meteringMode'] = self.metering_mode
11680
+ if self.auto_split is not None:
11681
+ result['autoSplit'] = self.auto_split
11682
+ if self.create_time is not None:
11683
+ result['createTime'] = self.create_time
11684
+ if self.last_modify_time is not None:
11685
+ result['lastModifyTime'] = self.last_modify_time
11686
+ if self.max_split_shard is not None:
11687
+ result['maxSplitShard'] = self.max_split_shard
11688
+ if self.metric_type is not None:
11689
+ result['metricType'] = self.metric_type
11690
+ if self.mode is not None:
11691
+ result['mode'] = self.mode
11692
+ if self.name is not None:
11693
+ result['name'] = self.name
11694
+ if self.shard_count is not None:
11695
+ result['shardCount'] = self.shard_count
11696
+ if self.ttl is not None:
11697
+ result['ttl'] = self.ttl
10617
11698
  return result
10618
11699
 
10619
11700
  def from_map(self, m: dict = None):
10620
11701
  m = m or dict()
10621
- if m.get('meteringMode') is not None:
10622
- self.metering_mode = m.get('meteringMode')
10623
- return self
10624
-
10625
-
11702
+ if m.get('autoSplit') is not None:
11703
+ self.auto_split = m.get('autoSplit')
11704
+ if m.get('createTime') is not None:
11705
+ self.create_time = m.get('createTime')
11706
+ if m.get('lastModifyTime') is not None:
11707
+ self.last_modify_time = m.get('lastModifyTime')
11708
+ if m.get('maxSplitShard') is not None:
11709
+ self.max_split_shard = m.get('maxSplitShard')
11710
+ if m.get('metricType') is not None:
11711
+ self.metric_type = m.get('metricType')
11712
+ if m.get('mode') is not None:
11713
+ self.mode = m.get('mode')
11714
+ if m.get('name') is not None:
11715
+ self.name = m.get('name')
11716
+ if m.get('shardCount') is not None:
11717
+ self.shard_count = m.get('shardCount')
11718
+ if m.get('ttl') is not None:
11719
+ self.ttl = m.get('ttl')
11720
+ return self
11721
+
11722
+
11723
+ class GetMetricStoreResponse(TeaModel):
11724
+ def __init__(
11725
+ self,
11726
+ headers: Dict[str, str] = None,
11727
+ status_code: int = None,
11728
+ body: GetMetricStoreResponseBody = None,
11729
+ ):
11730
+ self.headers = headers
11731
+ self.status_code = status_code
11732
+ self.body = body
11733
+
11734
+ def validate(self):
11735
+ if self.body:
11736
+ self.body.validate()
11737
+
11738
+ def to_map(self):
11739
+ _map = super().to_map()
11740
+ if _map is not None:
11741
+ return _map
11742
+
11743
+ result = dict()
11744
+ if self.headers is not None:
11745
+ result['headers'] = self.headers
11746
+ if self.status_code is not None:
11747
+ result['statusCode'] = self.status_code
11748
+ if self.body is not None:
11749
+ result['body'] = self.body.to_map()
11750
+ return result
11751
+
11752
+ def from_map(self, m: dict = None):
11753
+ m = m or dict()
11754
+ if m.get('headers') is not None:
11755
+ self.headers = m.get('headers')
11756
+ if m.get('statusCode') is not None:
11757
+ self.status_code = m.get('statusCode')
11758
+ if m.get('body') is not None:
11759
+ temp_model = GetMetricStoreResponseBody()
11760
+ self.body = temp_model.from_map(m['body'])
11761
+ return self
11762
+
11763
+
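A short sketch of deserializing the new GetMetricStore body with from_map; the import path and payload values are assumptions used only for illustration:

    from alibabacloud_sls20201230.models import GetMetricStoreResponseBody

    body = GetMetricStoreResponseBody().from_map({
        'name': 'my-metricstore',    # placeholder name
        'metricType': 'prometheus',  # illustrative value
        'mode': 'standard',
        'shardCount': 2,
        'ttl': 30,
        'autoSplit': True,
        'maxSplitShard': 64,
    })
    print(body.name, body.ttl, body.shard_count)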
11764
+ class GetMetricStoreMeteringModeResponseBody(TeaModel):
11765
+ def __init__(
11766
+ self,
11767
+ metering_mode: str = None,
11768
+ ):
11769
+ # The billing mode. Default value: ChargeByFunction. Valid values: ChargeByFunction and ChargeByDataIngest.
11770
+ self.metering_mode = metering_mode
11771
+
11772
+ def validate(self):
11773
+ pass
11774
+
11775
+ def to_map(self):
11776
+ _map = super().to_map()
11777
+ if _map is not None:
11778
+ return _map
11779
+
11780
+ result = dict()
11781
+ if self.metering_mode is not None:
11782
+ result['meteringMode'] = self.metering_mode
11783
+ return result
11784
+
11785
+ def from_map(self, m: dict = None):
11786
+ m = m or dict()
11787
+ if m.get('meteringMode') is not None:
11788
+ self.metering_mode = m.get('meteringMode')
11789
+ return self
11790
+
11791
+
10626
11792
  class GetMetricStoreMeteringModeResponse(TeaModel):
10627
11793
  def __init__(
10628
11794
  self,
@@ -11175,7 +12341,27 @@ class GetStoreViewResponseBody(TeaModel):
11175
12341
  store_type: str = None,
11176
12342
  stores: List[StoreViewStore] = None,
11177
12343
  ):
12344
+ # The type of the dataset.
12345
+ #
12346
+ # Valid values:
12347
+ #
12348
+ # * metricstore
+ # * logstore
11178
12363
  self.store_type = store_type
12364
+ # The Logstores or Metricstores.
11179
12365
  self.stores = stores
11180
12366
 
11181
12367
  def validate(self):
@@ -11258,8 +12444,11 @@ class GetStoreViewIndexResponseBodyIndexes(TeaModel):
11258
12444
  logstore: str = None,
11259
12445
  project: str = None,
11260
12446
  ):
12447
+ # The index configurations of the Logstore.
11261
12448
  self.index = index
12449
+ # The name of the Logstore.
11262
12450
  self.logstore = logstore
12451
+ # The name of the project to which the Logstore belongs.
11263
12452
  self.project = project
11264
12453
 
11265
12454
  def validate(self):
@@ -11297,6 +12486,7 @@ class GetStoreViewIndexResponseBody(TeaModel):
11297
12486
  self,
11298
12487
  indexes: List[GetStoreViewIndexResponseBodyIndexes] = None,
11299
12488
  ):
12489
+ # The index configurations.
11300
12490
  self.indexes = indexes
11301
12491
 
11302
12492
  def validate(self):
@@ -11414,8 +12604,11 @@ class ListAlertsResponseBody(TeaModel):
11414
12604
  results: List[Alert] = None,
11415
12605
  total: int = None,
11416
12606
  ):
12607
+ # The number of alert rules that are returned.
11417
12608
  self.count = count
12609
+ # The alert rules.
11418
12610
  self.results = results
12611
+ # The total number of alert rules in the project.
11419
12612
  self.total = total
11420
12613
 
11421
12614
  def validate(self):
@@ -12354,7 +13547,7 @@ class ListConfigRequest(TeaModel):
12354
13547
  offset: int = None,
12355
13548
  size: int = None,
12356
13549
  ):
12357
- # The name of the Logtail configuration.
13550
+ # The name of the Logtail configuration, which is used for fuzzy matching.
12358
13551
  self.config_name = config_name
12359
13552
  # The name of the Logstore.
12360
13553
  #
@@ -12727,22 +13920,418 @@ class ListDomainsRequest(TeaModel):
12727
13920
  return self
12728
13921
 
12729
13922
 
12730
- class ListDomainsResponseBody(TeaModel):
13923
+ class ListDomainsResponseBody(TeaModel):
13924
+ def __init__(
13925
+ self,
13926
+ count: int = None,
13927
+ domains: List[str] = None,
13928
+ total: int = None,
13929
+ ):
13930
+ # The number of domain names that are returned on the current page.
13931
+ self.count = count
13932
+ # The domain names.
13933
+ self.domains = domains
13934
+ # The total number of domain names that are returned.
13935
+ self.total = total
13936
+
13937
+ def validate(self):
13938
+ pass
13939
+
13940
+ def to_map(self):
13941
+ _map = super().to_map()
13942
+ if _map is not None:
13943
+ return _map
13944
+
13945
+ result = dict()
13946
+ if self.count is not None:
13947
+ result['count'] = self.count
13948
+ if self.domains is not None:
13949
+ result['domains'] = self.domains
13950
+ if self.total is not None:
13951
+ result['total'] = self.total
13952
+ return result
13953
+
13954
+ def from_map(self, m: dict = None):
13955
+ m = m or dict()
13956
+ if m.get('count') is not None:
13957
+ self.count = m.get('count')
13958
+ if m.get('domains') is not None:
13959
+ self.domains = m.get('domains')
13960
+ if m.get('total') is not None:
13961
+ self.total = m.get('total')
13962
+ return self
13963
+
13964
+
13965
+ class ListDomainsResponse(TeaModel):
13966
+ def __init__(
13967
+ self,
13968
+ headers: Dict[str, str] = None,
13969
+ status_code: int = None,
13970
+ body: ListDomainsResponseBody = None,
13971
+ ):
13972
+ self.headers = headers
13973
+ self.status_code = status_code
13974
+ self.body = body
13975
+
13976
+ def validate(self):
13977
+ if self.body:
13978
+ self.body.validate()
13979
+
13980
+ def to_map(self):
13981
+ _map = super().to_map()
13982
+ if _map is not None:
13983
+ return _map
13984
+
13985
+ result = dict()
13986
+ if self.headers is not None:
13987
+ result['headers'] = self.headers
13988
+ if self.status_code is not None:
13989
+ result['statusCode'] = self.status_code
13990
+ if self.body is not None:
13991
+ result['body'] = self.body.to_map()
13992
+ return result
13993
+
13994
+ def from_map(self, m: dict = None):
13995
+ m = m or dict()
13996
+ if m.get('headers') is not None:
13997
+ self.headers = m.get('headers')
13998
+ if m.get('statusCode') is not None:
13999
+ self.status_code = m.get('statusCode')
14000
+ if m.get('body') is not None:
14001
+ temp_model = ListDomainsResponseBody()
14002
+ self.body = temp_model.from_map(m['body'])
14003
+ return self
14004
+
14005
+
14006
+ class ListDownloadJobsRequest(TeaModel):
14007
+ def __init__(
14008
+ self,
14009
+ logstore: str = None,
14010
+ offset: int = None,
14011
+ size: int = None,
14012
+ ):
14013
+ self.logstore = logstore
14014
+ self.offset = offset
14015
+ self.size = size
14016
+
14017
+ def validate(self):
14018
+ pass
14019
+
14020
+ def to_map(self):
14021
+ _map = super().to_map()
14022
+ if _map is not None:
14023
+ return _map
14024
+
14025
+ result = dict()
14026
+ if self.logstore is not None:
14027
+ result['logstore'] = self.logstore
14028
+ if self.offset is not None:
14029
+ result['offset'] = self.offset
14030
+ if self.size is not None:
14031
+ result['size'] = self.size
14032
+ return result
14033
+
14034
+ def from_map(self, m: dict = None):
14035
+ m = m or dict()
14036
+ if m.get('logstore') is not None:
14037
+ self.logstore = m.get('logstore')
14038
+ if m.get('offset') is not None:
14039
+ self.offset = m.get('offset')
14040
+ if m.get('size') is not None:
14041
+ self.size = m.get('size')
14042
+ return self
14043
+
14044
+
14045
+ class ListDownloadJobsResponseBodyResultsConfigurationSink(TeaModel):
14046
+ def __init__(
14047
+ self,
14048
+ bucket: str = None,
14049
+ compression_type: str = None,
14050
+ content_type: str = None,
14051
+ prefix: str = None,
14052
+ role_arn: str = None,
14053
+ type: str = None,
14054
+ ):
14055
+ # The OSS bucket.
14056
+ self.bucket = bucket
14057
+ # The compression format.
14058
+ self.compression_type = compression_type
14059
+ # The format of the downloaded file.
14060
+ self.content_type = content_type
14061
+ self.prefix = prefix
14062
+ # The roleArn that is used for the download.
14063
+ self.role_arn = role_arn
14064
+ self.type = type
14065
+
14066
+ def validate(self):
14067
+ pass
14068
+
14069
+ def to_map(self):
14070
+ _map = super().to_map()
14071
+ if _map is not None:
14072
+ return _map
14073
+
14074
+ result = dict()
14075
+ if self.bucket is not None:
14076
+ result['bucket'] = self.bucket
14077
+ if self.compression_type is not None:
14078
+ result['compressionType'] = self.compression_type
14079
+ if self.content_type is not None:
14080
+ result['contentType'] = self.content_type
14081
+ if self.prefix is not None:
14082
+ result['prefix'] = self.prefix
14083
+ if self.role_arn is not None:
14084
+ result['roleArn'] = self.role_arn
14085
+ if self.type is not None:
14086
+ result['type'] = self.type
14087
+ return result
14088
+
14089
+ def from_map(self, m: dict = None):
14090
+ m = m or dict()
14091
+ if m.get('bucket') is not None:
14092
+ self.bucket = m.get('bucket')
14093
+ if m.get('compressionType') is not None:
14094
+ self.compression_type = m.get('compressionType')
14095
+ if m.get('contentType') is not None:
14096
+ self.content_type = m.get('contentType')
14097
+ if m.get('prefix') is not None:
14098
+ self.prefix = m.get('prefix')
14099
+ if m.get('roleArn') is not None:
14100
+ self.role_arn = m.get('roleArn')
14101
+ if m.get('type') is not None:
14102
+ self.type = m.get('type')
14103
+ return self
14104
+
14105
+
14106
+ class ListDownloadJobsResponseBodyResultsConfiguration(TeaModel):
14107
+ def __init__(
14108
+ self,
14109
+ allow_in_complete: str = None,
14110
+ from_time: int = None,
14111
+ logstore: str = None,
14112
+ power_sql: bool = None,
14113
+ query: str = None,
14114
+ sink: ListDownloadJobsResponseBodyResultsConfigurationSink = None,
14115
+ to_time: int = None,
14116
+ ):
14117
+ self.allow_in_complete = allow_in_complete
14118
+ # The start timestamp, accurate to seconds.
14119
+ self.from_time = from_time
14120
+ # The source Logstore.
14121
+ self.logstore = logstore
14122
+ # Indicates whether powerSql is enabled.
14123
+ self.power_sql = power_sql
14124
+ # The query statement.
14125
+ self.query = query
14126
+ # The export configuration.
14127
+ self.sink = sink
14128
+ # The end timestamp, accurate to seconds.
14129
+ self.to_time = to_time
14130
+
14131
+ def validate(self):
14132
+ if self.sink:
14133
+ self.sink.validate()
14134
+
14135
+ def to_map(self):
14136
+ _map = super().to_map()
14137
+ if _map is not None:
14138
+ return _map
14139
+
14140
+ result = dict()
14141
+ if self.allow_in_complete is not None:
14142
+ result['allowInComplete'] = self.allow_in_complete
14143
+ if self.from_time is not None:
14144
+ result['fromTime'] = self.from_time
14145
+ if self.logstore is not None:
14146
+ result['logstore'] = self.logstore
14147
+ if self.power_sql is not None:
14148
+ result['powerSql'] = self.power_sql
14149
+ if self.query is not None:
14150
+ result['query'] = self.query
14151
+ if self.sink is not None:
14152
+ result['sink'] = self.sink.to_map()
14153
+ if self.to_time is not None:
14154
+ result['toTime'] = self.to_time
14155
+ return result
14156
+
14157
+ def from_map(self, m: dict = None):
14158
+ m = m or dict()
14159
+ if m.get('allowInComplete') is not None:
14160
+ self.allow_in_complete = m.get('allowInComplete')
14161
+ if m.get('fromTime') is not None:
14162
+ self.from_time = m.get('fromTime')
14163
+ if m.get('logstore') is not None:
14164
+ self.logstore = m.get('logstore')
14165
+ if m.get('powerSql') is not None:
14166
+ self.power_sql = m.get('powerSql')
14167
+ if m.get('query') is not None:
14168
+ self.query = m.get('query')
14169
+ if m.get('sink') is not None:
14170
+ temp_model = ListDownloadJobsResponseBodyResultsConfigurationSink()
14171
+ self.sink = temp_model.from_map(m['sink'])
14172
+ if m.get('toTime') is not None:
14173
+ self.to_time = m.get('toTime')
14174
+ return self
14175
+
14176
+
14177
+ class ListDownloadJobsResponseBodyResultsExecutionDetails(TeaModel):
14178
+ def __init__(
14179
+ self,
14180
+ check_sum: str = None,
14181
+ error_message: str = None,
14182
+ execute_time: int = None,
14183
+ file_path: str = None,
14184
+ file_size: int = None,
14185
+ log_count: int = None,
14186
+ progress: int = None,
14187
+ ):
14188
+ self.check_sum = check_sum
14189
+ # The error message of the download job.
14190
+ self.error_message = error_message
14191
+ # The execution time of the download job.
14192
+ self.execute_time = execute_time
14193
+ # The link to the download result.
14194
+ self.file_path = file_path
14195
+ # The size of the downloaded file.
14196
+ self.file_size = file_size
14197
+ # The number of downloaded log entries.
14198
+ self.log_count = log_count
14199
+ # The progress of the download job.
14200
+ self.progress = progress
14201
+
14202
+ def validate(self):
14203
+ pass
14204
+
14205
+ def to_map(self):
14206
+ _map = super().to_map()
14207
+ if _map is not None:
14208
+ return _map
14209
+
14210
+ result = dict()
14211
+ if self.check_sum is not None:
14212
+ result['checkSum'] = self.check_sum
14213
+ if self.error_message is not None:
14214
+ result['errorMessage'] = self.error_message
14215
+ if self.execute_time is not None:
14216
+ result['executeTime'] = self.execute_time
14217
+ if self.file_path is not None:
14218
+ result['filePath'] = self.file_path
14219
+ if self.file_size is not None:
14220
+ result['fileSize'] = self.file_size
14221
+ if self.log_count is not None:
14222
+ result['logCount'] = self.log_count
14223
+ if self.progress is not None:
14224
+ result['progress'] = self.progress
14225
+ return result
14226
+
14227
+ def from_map(self, m: dict = None):
14228
+ m = m or dict()
14229
+ if m.get('checkSum') is not None:
14230
+ self.check_sum = m.get('checkSum')
14231
+ if m.get('errorMessage') is not None:
14232
+ self.error_message = m.get('errorMessage')
14233
+ if m.get('executeTime') is not None:
14234
+ self.execute_time = m.get('executeTime')
14235
+ if m.get('filePath') is not None:
14236
+ self.file_path = m.get('filePath')
14237
+ if m.get('fileSize') is not None:
14238
+ self.file_size = m.get('fileSize')
14239
+ if m.get('logCount') is not None:
14240
+ self.log_count = m.get('logCount')
14241
+ if m.get('progress') is not None:
14242
+ self.progress = m.get('progress')
14243
+ return self
14244
+
14245
+
14246
+ class ListDownloadJobsResponseBodyResults(TeaModel):
14247
+ def __init__(
14248
+ self,
14249
+ configuration: ListDownloadJobsResponseBodyResultsConfiguration = None,
14250
+ create_time: str = None,
14251
+ description: str = None,
14252
+ display_name: str = None,
14253
+ execution_details: ListDownloadJobsResponseBodyResultsExecutionDetails = None,
14254
+ name: str = None,
14255
+ status: str = None,
14256
+ ):
14257
+ # The download configuration.
14258
+ self.configuration = configuration
14259
+ self.create_time = create_time
14260
+ # The description of the job.
14261
+ self.description = description
14262
+ # The display name of the job.
14263
+ self.display_name = display_name
14264
+ # The execution details of the job.
14265
+ self.execution_details = execution_details
14266
+ # The resource attribute field that represents the resource name.
14267
+ self.name = name
14268
+ self.status = status
14269
+
14270
+ def validate(self):
14271
+ if self.configuration:
14272
+ self.configuration.validate()
14273
+ if self.execution_details:
14274
+ self.execution_details.validate()
14275
+
14276
+ def to_map(self):
14277
+ _map = super().to_map()
14278
+ if _map is not None:
14279
+ return _map
14280
+
14281
+ result = dict()
14282
+ if self.configuration is not None:
14283
+ result['configuration'] = self.configuration.to_map()
14284
+ if self.create_time is not None:
14285
+ result['createTime'] = self.create_time
14286
+ if self.description is not None:
14287
+ result['description'] = self.description
14288
+ if self.display_name is not None:
14289
+ result['displayName'] = self.display_name
14290
+ if self.execution_details is not None:
14291
+ result['executionDetails'] = self.execution_details.to_map()
14292
+ if self.name is not None:
14293
+ result['name'] = self.name
14294
+ if self.status is not None:
14295
+ result['status'] = self.status
14296
+ return result
14297
+
14298
+ def from_map(self, m: dict = None):
14299
+ m = m or dict()
14300
+ if m.get('configuration') is not None:
14301
+ temp_model = ListDownloadJobsResponseBodyResultsConfiguration()
14302
+ self.configuration = temp_model.from_map(m['configuration'])
14303
+ if m.get('createTime') is not None:
14304
+ self.create_time = m.get('createTime')
14305
+ if m.get('description') is not None:
14306
+ self.description = m.get('description')
14307
+ if m.get('displayName') is not None:
14308
+ self.display_name = m.get('displayName')
14309
+ if m.get('executionDetails') is not None:
14310
+ temp_model = ListDownloadJobsResponseBodyResultsExecutionDetails()
14311
+ self.execution_details = temp_model.from_map(m['executionDetails'])
14312
+ if m.get('name') is not None:
14313
+ self.name = m.get('name')
14314
+ if m.get('status') is not None:
14315
+ self.status = m.get('status')
14316
+ return self
14317
+
14318
+
14319
+ class ListDownloadJobsResponseBody(TeaModel):
12731
14320
  def __init__(
12732
14321
  self,
12733
14322
  count: int = None,
12734
- domains: List[str] = None,
14323
+ results: List[ListDownloadJobsResponseBodyResults] = None,
12735
14324
  total: int = None,
12736
14325
  ):
12737
- # The number of domain names that are returned on the current page.
12738
14326
  self.count = count
12739
- # The domain names.
12740
- self.domains = domains
12741
- # The total number of domain names that are returned.
14327
+ self.results = results
12742
14328
  self.total = total
12743
14329
 
12744
14330
  def validate(self):
12745
- pass
14331
+ if self.results:
14332
+ for k in self.results:
14333
+ if k:
14334
+ k.validate()
12746
14335
 
12747
14336
  def to_map(self):
12748
14337
  _map = super().to_map()
@@ -12752,8 +14341,10 @@ class ListDomainsResponseBody(TeaModel):
12752
14341
  result = dict()
12753
14342
  if self.count is not None:
12754
14343
  result['count'] = self.count
12755
- if self.domains is not None:
12756
- result['domains'] = self.domains
14344
+ result['results'] = []
14345
+ if self.results is not None:
14346
+ for k in self.results:
14347
+ result['results'].append(k.to_map() if k else None)
12757
14348
  if self.total is not None:
12758
14349
  result['total'] = self.total
12759
14350
  return result
@@ -12762,19 +14353,22 @@ class ListDomainsResponseBody(TeaModel):
12762
14353
  m = m or dict()
12763
14354
  if m.get('count') is not None:
12764
14355
  self.count = m.get('count')
12765
- if m.get('domains') is not None:
12766
- self.domains = m.get('domains')
14356
+ self.results = []
14357
+ if m.get('results') is not None:
14358
+ for k in m.get('results'):
14359
+ temp_model = ListDownloadJobsResponseBodyResults()
14360
+ self.results.append(temp_model.from_map(k))
12767
14361
  if m.get('total') is not None:
12768
14362
  self.total = m.get('total')
12769
14363
  return self
12770
14364
 
12771
14365
 
12772
- class ListDomainsResponse(TeaModel):
14366
+ class ListDownloadJobsResponse(TeaModel):
12773
14367
  def __init__(
12774
14368
  self,
12775
14369
  headers: Dict[str, str] = None,
12776
14370
  status_code: int = None,
12777
- body: ListDomainsResponseBody = None,
14371
+ body: ListDownloadJobsResponseBody = None,
12778
14372
  ):
12779
14373
  self.headers = headers
12780
14374
  self.status_code = status_code
@@ -12805,7 +14399,7 @@ class ListDomainsResponse(TeaModel):
12805
14399
  if m.get('statusCode') is not None:
12806
14400
  self.status_code = m.get('statusCode')
12807
14401
  if m.get('body') is not None:
12808
- temp_model = ListDomainsResponseBody()
14402
+ temp_model = ListDownloadJobsResponseBody()
12809
14403
  self.body = temp_model.from_map(m['body'])
12810
14404
  return self
12811
14405
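A minimal sketch of the new ListDownloadJobs models described above; the import path, job names, and the sample payload are illustrative assumptions, and the client call itself is omitted because it is not part of this hunk:

    from alibabacloud_sls20201230.models import (
        ListDownloadJobsRequest,
        ListDownloadJobsResponseBody,
    )

    request = ListDownloadJobsRequest(logstore='my-logstore', offset=0, size=20)
    print(request.to_map())  # {'logstore': 'my-logstore', 'offset': 0, 'size': 20}

    # Parsing an illustrative response payload with the result models above:
    body = ListDownloadJobsResponseBody().from_map({
        'count': 1,
        'total': 1,
        'results': [{
            'name': 'export-job-1',  # placeholder job name
            'status': 'SUCCEEDED',   # illustrative status value
            'executionDetails': {'progress': 100, 'logCount': 42},
        }],
    })
    print(body.results[0].name, body.results[0].execution_details.progress)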
 
@@ -13465,6 +15059,131 @@ class ListMachinesResponse(TeaModel):
13465
15059
  return self
13466
15060
 
13467
15061
 
15062
+ class ListMetricStoresRequest(TeaModel):
15063
+ def __init__(
15064
+ self,
15065
+ mode: str = None,
15066
+ name: str = None,
15067
+ offset: int = None,
15068
+ size: int = None,
15069
+ ):
15070
+ self.mode = mode
15071
+ self.name = name
15072
+ self.offset = offset
15073
+ self.size = size
15074
+
15075
+ def validate(self):
15076
+ pass
15077
+
15078
+ def to_map(self):
15079
+ _map = super().to_map()
15080
+ if _map is not None:
15081
+ return _map
15082
+
15083
+ result = dict()
15084
+ if self.mode is not None:
15085
+ result['mode'] = self.mode
15086
+ if self.name is not None:
15087
+ result['name'] = self.name
15088
+ if self.offset is not None:
15089
+ result['offset'] = self.offset
15090
+ if self.size is not None:
15091
+ result['size'] = self.size
15092
+ return result
15093
+
15094
+ def from_map(self, m: dict = None):
15095
+ m = m or dict()
15096
+ if m.get('mode') is not None:
15097
+ self.mode = m.get('mode')
15098
+ if m.get('name') is not None:
15099
+ self.name = m.get('name')
15100
+ if m.get('offset') is not None:
15101
+ self.offset = m.get('offset')
15102
+ if m.get('size') is not None:
15103
+ self.size = m.get('size')
15104
+ return self
15105
+
15106
+
15107
+ class ListMetricStoresResponseBody(TeaModel):
15108
+ def __init__(
15109
+ self,
15110
+ count: int = None,
15111
+ metricstores: List[str] = None,
15112
+ total: int = None,
15113
+ ):
15114
+ self.count = count
15115
+ self.metricstores = metricstores
15116
+ self.total = total
15117
+
15118
+ def validate(self):
15119
+ pass
15120
+
15121
+ def to_map(self):
15122
+ _map = super().to_map()
15123
+ if _map is not None:
15124
+ return _map
15125
+
15126
+ result = dict()
15127
+ if self.count is not None:
15128
+ result['count'] = self.count
15129
+ if self.metricstores is not None:
15130
+ result['metricstores'] = self.metricstores
15131
+ if self.total is not None:
15132
+ result['total'] = self.total
15133
+ return result
15134
+
15135
+ def from_map(self, m: dict = None):
15136
+ m = m or dict()
15137
+ if m.get('count') is not None:
15138
+ self.count = m.get('count')
15139
+ if m.get('metricstores') is not None:
15140
+ self.metricstores = m.get('metricstores')
15141
+ if m.get('total') is not None:
15142
+ self.total = m.get('total')
15143
+ return self
15144
+
15145
+
15146
+ class ListMetricStoresResponse(TeaModel):
15147
+ def __init__(
15148
+ self,
15149
+ headers: Dict[str, str] = None,
15150
+ status_code: int = None,
15151
+ body: ListMetricStoresResponseBody = None,
15152
+ ):
15153
+ self.headers = headers
15154
+ self.status_code = status_code
15155
+ self.body = body
15156
+
15157
+ def validate(self):
15158
+ if self.body:
15159
+ self.body.validate()
15160
+
15161
+ def to_map(self):
15162
+ _map = super().to_map()
15163
+ if _map is not None:
15164
+ return _map
15165
+
15166
+ result = dict()
15167
+ if self.headers is not None:
15168
+ result['headers'] = self.headers
15169
+ if self.status_code is not None:
15170
+ result['statusCode'] = self.status_code
15171
+ if self.body is not None:
15172
+ result['body'] = self.body.to_map()
15173
+ return result
15174
+
15175
+ def from_map(self, m: dict = None):
15176
+ m = m or dict()
15177
+ if m.get('headers') is not None:
15178
+ self.headers = m.get('headers')
15179
+ if m.get('statusCode') is not None:
15180
+ self.status_code = m.get('statusCode')
15181
+ if m.get('body') is not None:
15182
+ temp_model = ListMetricStoresResponseBody()
15183
+ self.body = temp_model.from_map(m['body'])
15184
+ return self
15185
+
15186
+
13468
15187
  class ListOSSExportsRequest(TeaModel):
13469
15188
  def __init__(
13470
15189
  self,
@@ -13765,8 +15484,11 @@ class ListOSSIngestionsResponseBody(TeaModel):
13765
15484
  results: List[OSSIngestion] = None,
13766
15485
  total: int = None,
13767
15486
  ):
15487
+ # The number of OSS data import jobs that are returned.
13768
15488
  self.count = count
15489
+ # The OSS data import jobs.
13769
15490
  self.results = results
15491
+ # The total number of OSS data import jobs in the project.
13770
15492
  self.total = total
13771
15493
 
13772
15494
  def validate(self):
@@ -14124,6 +15846,7 @@ class ListScheduledSQLsRequest(TeaModel):
14124
15846
  offset: int = None,
14125
15847
  size: int = None,
14126
15848
  ):
15849
+ # The name of the Logstore.
14127
15850
  self.logstore = logstore
14128
15851
  self.offset = offset
14129
15852
  self.size = size
@@ -14299,9 +16022,31 @@ class ListStoreViewsRequest(TeaModel):
14299
16022
  size: int = None,
14300
16023
  store_type: str = None,
14301
16024
  ):
16025
+ # The dataset name, which is used for fuzzy matching.
14302
16026
  self.name = name
16027
+ # The offset of the datasets to return. Default value: 0.
14303
16028
  self.offset = offset
16029
+ # The number of datasets to return. Default value: 100.
14304
16030
  self.size = size
16031
+ # The type of the datasets to return. By default, datasets are not filtered by type.
16032
+ #
16033
+ # Valid values:
16034
+ #
16035
+ # * metricstore
+ # * logstore
14305
16050
  self.store_type = store_type
14306
16051
 
14307
16052
  def validate(self):
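A brief sketch of the paging and filtering parameters documented above; the dataset name is a placeholder:

    from alibabacloud_sls20201230.models import ListStoreViewsRequest

    request = ListStoreViewsRequest(
        name='my-view',         # name used for fuzzy matching (placeholder)
        store_type='logstore',  # or 'metricstore'; omit to return both types
        offset=0,
        size=100,
    )
    print(request.to_map())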
@@ -14343,8 +16088,11 @@ class ListStoreViewsResponseBody(TeaModel):
14343
16088
  storeviews: List[str] = None,
14344
16089
  total: int = None,
14345
16090
  ):
16091
+ # The number of returned datasets.
14346
16092
  self.count = count
16093
+ # The dataset names.
14347
16094
  self.storeviews = storeviews
16095
+ # The total number of datasets in the project.
14348
16096
  self.total = total
14349
16097
 
14350
16098
  def validate(self):
@@ -15896,11 +17644,18 @@ class UpdateAlertRequest(TeaModel):
15896
17644
  display_name: str = None,
15897
17645
  schedule: Schedule = None,
15898
17646
  ):
17647
+ # The detailed configurations of the alert rule.
17648
+ #
15899
17649
  # This parameter is required.
15900
17650
  self.configuration = configuration
17651
+ # The description of the alert rule.
15901
17652
  self.description = description
17653
+ # The display name of the alert rule.
17654
+ #
15902
17655
  # This parameter is required.
15903
17656
  self.display_name = display_name
17657
+ # The scheduling settings of the alert rule.
17658
+ #
15904
17659
  # This parameter is required.
15905
17660
  self.schedule = schedule
15906
17661
 
@@ -16609,6 +18364,7 @@ class UpdateLogStoreRequest(TeaModel):
16609
18364
  logstore_name: str = None,
16610
18365
  max_split_shard: int = None,
16611
18366
  mode: str = None,
18367
+ processor_id: str = None,
16612
18368
  shard_count: int = None,
16613
18369
  telemetry_type: str = None,
16614
18370
  ttl: int = None,
@@ -16630,32 +18386,34 @@ class UpdateLogStoreRequest(TeaModel):
16630
18386
  self.enable_tracking = enable_tracking
16631
18387
  # The data structure of the encryption configuration.
16632
18388
  self.encrypt_conf = encrypt_conf
16633
- # The retention period of data in the hot storage tier of the Logstore. Minimum value: 30. Unit: day. You can specify a value that ranges from 30 to the value of ttl. Hot data that is stored for longer than the period specified by hot_ttl is converted to cold data. For more information, see [Enable hot and cold-tiered storage for a Logstore](https://help.aliyun.com/document_detail/308645.html).
18389
+ # The retention period of data in the hot storage tier of the Logstore. Valid values: 7 to 3000. Unit: days. After the retention period that is specified for the hot storage tier elapses, the data is moved to the Infrequent Access (IA) storage tier. For more information, see [Enable hot and cold-tiered storage for a Logstore](https://help.aliyun.com/document_detail/308645.html).
16634
18390
  self.hot_ttl = hot_ttl
18391
+ # The retention period of data in the IA storage tier of the Logstore. You must set this parameter to at least 30 days. After the data retention period that you specify for the IA storage tier elapses, the data is moved to the Archive storage tier.
16635
18392
  self.infrequent_access_ttl = infrequent_access_ttl
16636
18393
  # The name of the Logstore.
16637
18394
  #
16638
18395
  # This parameter is required.
16639
18396
  self.logstore_name = logstore_name
16640
- # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 64.
18397
+ # The maximum number of shards into which existing shards can be automatically split. Valid values: 1 to 256.
16641
18398
  #
16642
- # > If you set autoSplit to true, you must specify maxSplitShard.
18399
+ # > If you set autoSplit to true, you must specify maxSplitShard.
16643
18400
  self.max_split_shard = max_split_shard
16644
- # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores.
18401
+ # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores. Valid values:
16645
18402
  #
16646
18403
  # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
16647
- # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the volume of data is large, the log retention period is long, or log analysis is not required. Log retention periods of weeks or months are considered long.
18404
+ # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
16648
18405
  self.mode = mode
18406
+ self.processor_id = processor_id
16649
18407
  # The number of shards.
16650
18408
  #
16651
- # > You cannot call the UpdateLogstore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
18409
+ # > You cannot call the UpdateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
16652
18410
  self.shard_count = shard_count
16653
- # The type of the log that you want to query. Valid values:
18411
+ # The type of the observable data. Valid values:
16654
18412
  #
16655
- # * None: all types of logs.
16656
- # * Metrics: metrics.
18413
+ # * None (default): log data.
18414
+ # * Metrics: metric data.
16657
18415
  self.telemetry_type = telemetry_type
16658
- # The retention period of data. Unit: day. Valid values: 1 to 3650. If you set ttl to 3650, data is permanently stored.
18416
+ # The retention period of data. Unit: days. Valid values: 1 to 3650. If you set this parameter to 3650, logs are permanently stored.
16659
18417
  #
16660
18418
  # This parameter is required.
16661
18419
  self.ttl = ttl
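A minimal sketch that combines the tiered-storage and shard parameters documented above; the Logstore name and retention values are placeholders chosen to satisfy the stated constraints:

    from alibabacloud_sls20201230.models import UpdateLogStoreRequest

    request = UpdateLogStoreRequest(
        logstore_name='my-logstore',
        ttl=90,                    # total retention in days; 3650 means permanent storage
        hot_ttl=30,                # hot tier (7 to 3000 days); data then moves to the IA tier
        infrequent_access_ttl=60,  # IA tier (at least 30 days); data then moves to Archive
        mode='standard',           # or 'query' for query-only Logstores
        max_split_shard=64,        # required when autoSplit is enabled
    )
    print(request.to_map())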
@@ -16688,6 +18446,8 @@ class UpdateLogStoreRequest(TeaModel):
16688
18446
  result['maxSplitShard'] = self.max_split_shard
16689
18447
  if self.mode is not None:
16690
18448
  result['mode'] = self.mode
18449
+ if self.processor_id is not None:
18450
+ result['processorId'] = self.processor_id
16691
18451
  if self.shard_count is not None:
16692
18452
  result['shardCount'] = self.shard_count
16693
18453
  if self.telemetry_type is not None:
@@ -16717,6 +18477,8 @@ class UpdateLogStoreRequest(TeaModel):
16717
18477
  self.max_split_shard = m.get('maxSplitShard')
16718
18478
  if m.get('mode') is not None:
16719
18479
  self.mode = m.get('mode')
18480
+ if m.get('processorId') is not None:
18481
+ self.processor_id = m.get('processorId')
16720
18482
  if m.get('shardCount') is not None:
16721
18483
  self.shard_count = m.get('shardCount')
16722
18484
  if m.get('telemetryType') is not None:
@@ -17267,6 +19029,84 @@ class UpdateMachineGroupMachineResponse(TeaModel):
17267
19029
  return self
17268
19030
 
17269
19031
 
19032
+ class UpdateMetricStoreRequest(TeaModel):
19033
+ def __init__(
19034
+ self,
19035
+ auto_split: bool = None,
19036
+ max_split_shard: int = None,
19037
+ mode: str = None,
19038
+ ttl: int = None,
19039
+ ):
19040
+ self.auto_split = auto_split
19041
+ self.max_split_shard = max_split_shard
19042
+ self.mode = mode
19043
+ self.ttl = ttl
19044
+
19045
+ def validate(self):
19046
+ pass
19047
+
19048
+ def to_map(self):
19049
+ _map = super().to_map()
19050
+ if _map is not None:
19051
+ return _map
19052
+
19053
+ result = dict()
19054
+ if self.auto_split is not None:
19055
+ result['autoSplit'] = self.auto_split
19056
+ if self.max_split_shard is not None:
19057
+ result['maxSplitShard'] = self.max_split_shard
19058
+ if self.mode is not None:
19059
+ result['mode'] = self.mode
19060
+ if self.ttl is not None:
19061
+ result['ttl'] = self.ttl
19062
+ return result
19063
+
19064
+ def from_map(self, m: dict = None):
19065
+ m = m or dict()
19066
+ if m.get('autoSplit') is not None:
19067
+ self.auto_split = m.get('autoSplit')
19068
+ if m.get('maxSplitShard') is not None:
19069
+ self.max_split_shard = m.get('maxSplitShard')
19070
+ if m.get('mode') is not None:
19071
+ self.mode = m.get('mode')
19072
+ if m.get('ttl') is not None:
19073
+ self.ttl = m.get('ttl')
19074
+ return self
19075
+
19076
+
19077
+ class UpdateMetricStoreResponse(TeaModel):
19078
+ def __init__(
19079
+ self,
19080
+ headers: Dict[str, str] = None,
19081
+ status_code: int = None,
19082
+ ):
19083
+ self.headers = headers
19084
+ self.status_code = status_code
19085
+
19086
+ def validate(self):
19087
+ pass
19088
+
19089
+ def to_map(self):
19090
+ _map = super().to_map()
19091
+ if _map is not None:
19092
+ return _map
19093
+
19094
+ result = dict()
19095
+ if self.headers is not None:
19096
+ result['headers'] = self.headers
19097
+ if self.status_code is not None:
19098
+ result['statusCode'] = self.status_code
19099
+ return result
19100
+
19101
+ def from_map(self, m: dict = None):
19102
+ m = m or dict()
19103
+ if m.get('headers') is not None:
19104
+ self.headers = m.get('headers')
19105
+ if m.get('statusCode') is not None:
19106
+ self.status_code = m.get('statusCode')
19107
+ return self
19108
+
19109
+
17270
19110
  class UpdateMetricStoreMeteringModeRequest(TeaModel):
17271
19111
  def __init__(
17272
19112
  self,
@@ -17335,8 +19175,11 @@ class UpdateOSSExportRequest(TeaModel):
17335
19175
  description: str = None,
17336
19176
  display_name: str = None,
17337
19177
  ):
19178
+ # The configuration details of the job.
17338
19179
  self.configuration = configuration
19180
+ # The description of the job.
17339
19181
  self.description = description
19182
+ # The display name of the job.
17340
19183
  self.display_name = display_name
17341
19184
 
17342
19185
  def validate(self):
@@ -17409,8 +19252,11 @@ class UpdateOSSHDFSExportRequest(TeaModel):
17409
19252
  description: str = None,
17410
19253
  display_name: str = None,
17411
19254
  ):
19255
+ # The configuration details of the job.
17412
19256
  self.configuration = configuration
19257
+ # The description of the job.
17413
19258
  self.description = description
19259
+ # The display name of the job.
17414
19260
  self.display_name = display_name
17415
19261
 
17416
19262
  def validate(self):
@@ -17484,11 +19330,17 @@ class UpdateOSSIngestionRequest(TeaModel):
17484
19330
  display_name: str = None,
17485
19331
  schedule: Schedule = None,
17486
19332
  ):
19333
+ # The configurations of the OSS data import job.
19334
+ #
17487
19335
  # This parameter is required.
17488
19336
  self.configuration = configuration
19337
+ # The description of the OSS data import job.
17489
19338
  self.description = description
19339
+ # The display name of the OSS data import job.
19340
+ #
17490
19341
  # This parameter is required.
17491
19342
  self.display_name = display_name
19343
+ # The scheduling type. By default, you do not need to specify this parameter. If you want to import data at regular intervals, for example, every Monday at 08:00, you can specify a cron expression.
17492
19344
  self.schedule = schedule
17493
19345
 
17494
19346
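A sketch of a weekly cron schedule for the import job; the Schedule model is not part of this hunk, so the field names below (type, cron_expression) are assumptions based on other SLS job models:

    from alibabacloud_sls20201230.models import Schedule

    # Run the OSS data import every Monday at 08:00 (assumed field names).
    weekly = Schedule(type='Cron', cron_expression='0 8 * * 1')
    # request = UpdateOSSIngestionRequest(configuration=..., display_name='...', schedule=weekly)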
  def validate(self):
@@ -18267,8 +20119,30 @@ class UpdateStoreViewRequest(TeaModel):
18267
20119
  store_type: str = None,
18268
20120
  stores: List[StoreViewStore] = None,
18269
20121
  ):
20122
+ # The type of the dataset.
20123
+ #
20124
+ # Valid values:
20125
+ #
20126
+ # * metricstore
+ # * logstore
+ #
18270
20142
  # This parameter is required.
18271
20143
  self.store_type = store_type
20144
+ # The Logstores or Metricstores.
20145
+ #
18272
20146
  # This parameter is required.
18273
20147
  self.stores = stores
18274
20148