alibabacloud-sls20201230 5.4.0__py3-none-any.whl → 5.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +4 -186
- alibabacloud_sls20201230/models.py +175 -528
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/METADATA +1 -1
- alibabacloud_sls20201230-5.4.1.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.4.0.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/top_level.txt +0 -0
alibabacloud_sls20201230/__init__.py

@@ -1 +1 @@
-__version__ = '5.4.0'
+__version__ = '5.4.1'
alibabacloud_sls20201230/client.py

@@ -1745,25 +1745,10 @@ class Client(OpenApiClient):
         UtilClient.validate_model(request)
         host_map = {}
         host_map['project'] = project
-        body = {}
-        if not UtilClient.is_unset(request.keys):
-            body['keys'] = request.keys
-        if not UtilClient.is_unset(request.line):
-            body['line'] = request.line
-        if not UtilClient.is_unset(request.log_reduce):
-            body['log_reduce'] = request.log_reduce
-        if not UtilClient.is_unset(request.log_reduce_black_list):
-            body['log_reduce_black_list'] = request.log_reduce_black_list
-        if not UtilClient.is_unset(request.log_reduce_white_list):
-            body['log_reduce_white_list'] = request.log_reduce_white_list
-        if not UtilClient.is_unset(request.max_text_len):
-            body['max_text_len'] = request.max_text_len
-        if not UtilClient.is_unset(request.ttl):
-            body['ttl'] = request.ttl
         req = open_api_models.OpenApiRequest(
             host_map=host_map,
             headers=headers,
-            body=OpenApiUtilClient.parse_to_map(body)
+            body=OpenApiUtilClient.parse_to_map(request.body)
         )
         params = open_api_models.Params(
             action='CreateIndex',
@@ -1802,25 +1787,10 @@ class Client(OpenApiClient):
         UtilClient.validate_model(request)
         host_map = {}
         host_map['project'] = project
-        body = {}
-        if not UtilClient.is_unset(request.keys):
-            body['keys'] = request.keys
-        if not UtilClient.is_unset(request.line):
-            body['line'] = request.line
-        if not UtilClient.is_unset(request.log_reduce):
-            body['log_reduce'] = request.log_reduce
-        if not UtilClient.is_unset(request.log_reduce_black_list):
-            body['log_reduce_black_list'] = request.log_reduce_black_list
-        if not UtilClient.is_unset(request.log_reduce_white_list):
-            body['log_reduce_white_list'] = request.log_reduce_white_list
-        if not UtilClient.is_unset(request.max_text_len):
-            body['max_text_len'] = request.max_text_len
-        if not UtilClient.is_unset(request.ttl):
-            body['ttl'] = request.ttl
         req = open_api_models.OpenApiRequest(
             host_map=host_map,
             headers=headers,
-            body=OpenApiUtilClient.parse_to_map(body)
+            body=OpenApiUtilClient.parse_to_map(request.body)
         )
         params = open_api_models.Params(
             action='CreateIndex',
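
Caller-side note (not part of the diff): with this change, CreateIndex no longer assembles the request body from flat fields on the request model; it serializes `request.body` directly. A minimal sketch of the new call shape follows. Only the model field names are taken from the hunks in this diff; the `create_index(project, logstore, request)` call signature, the client setup, and the sample project/logstore names are assumptions.

from alibabacloud_sls20201230 import models as sls_models

# Build the index definition as a single Index model and attach it as the request body.
index = sls_models.Index(
    line=sls_models.IndexLine(token=[',', ' ', ';']),  # full-text index; IndexLine field names assumed to mirror the removed *RequestLine models
    keys={
        'status': sls_models.IndexKey(type='long', doc_value=True),
        'message': sls_models.IndexKey(type='text', case_sensitive=False, token=[',', ' ']),
    },
    max_text_len=2048,
)
request = sls_models.CreateIndexRequest(body=index)
# response = client.create_index('my-project', 'my-logstore', request)  # hypothetical names; client construction omitted
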
@@ -15860,128 +15830,6 @@ class Client(OpenApiClient):
         headers = {}
         return await self.put_webtracking_with_options_async(project, logstore_name, request, headers, runtime)

-    def query_mlservice_results_with_options(
-        self,
-        service_name: str,
-        request: sls_20201230_models.QueryMLServiceResultsRequest,
-        headers: Dict[str, str],
-        runtime: util_models.RuntimeOptions,
-    ) -> sls_20201230_models.QueryMLServiceResultsResponse:
-        """
-        @deprecated OpenAPI QueryMLServiceResults is deprecated
-
-        @summary queryMLServiceResults
-
-        @param request: QueryMLServiceResultsRequest
-        @param headers: map
-        @param runtime: runtime options for this request RuntimeOptions
-        @return: QueryMLServiceResultsResponse
-        Deprecated
-        """
-        UtilClient.validate_model(request)
-        query = {}
-        if not UtilClient.is_unset(request.allow_builtin):
-            query['allowBuiltin'] = request.allow_builtin
-        req = open_api_models.OpenApiRequest(
-            headers=headers,
-            query=OpenApiUtilClient.query(query),
-            body=OpenApiUtilClient.parse_to_map(request.body)
-        )
-        params = open_api_models.Params(
-            action='QueryMLServiceResults',
-            version='2020-12-30',
-            protocol='HTTPS',
-            pathname=f'/ml/service/{service_name}/analysis',
-            method='POST',
-            auth_type='AK',
-            style='ROA',
-            req_body_type='json',
-            body_type='json'
-        )
-        return TeaCore.from_map(
-            sls_20201230_models.QueryMLServiceResultsResponse(),
-            self.execute(params, req, runtime)
-        )
-
-    async def query_mlservice_results_with_options_async(
-        self,
-        service_name: str,
-        request: sls_20201230_models.QueryMLServiceResultsRequest,
-        headers: Dict[str, str],
-        runtime: util_models.RuntimeOptions,
-    ) -> sls_20201230_models.QueryMLServiceResultsResponse:
-        """
-        @deprecated OpenAPI QueryMLServiceResults is deprecated
-
-        @summary queryMLServiceResults
-
-        @param request: QueryMLServiceResultsRequest
-        @param headers: map
-        @param runtime: runtime options for this request RuntimeOptions
-        @return: QueryMLServiceResultsResponse
-        Deprecated
-        """
-        UtilClient.validate_model(request)
-        query = {}
-        if not UtilClient.is_unset(request.allow_builtin):
-            query['allowBuiltin'] = request.allow_builtin
-        req = open_api_models.OpenApiRequest(
-            headers=headers,
-            query=OpenApiUtilClient.query(query),
-            body=OpenApiUtilClient.parse_to_map(request.body)
-        )
-        params = open_api_models.Params(
-            action='QueryMLServiceResults',
-            version='2020-12-30',
-            protocol='HTTPS',
-            pathname=f'/ml/service/{service_name}/analysis',
-            method='POST',
-            auth_type='AK',
-            style='ROA',
-            req_body_type='json',
-            body_type='json'
-        )
-        return TeaCore.from_map(
-            sls_20201230_models.QueryMLServiceResultsResponse(),
-            await self.execute_async(params, req, runtime)
-        )
-
-    def query_mlservice_results(
-        self,
-        service_name: str,
-        request: sls_20201230_models.QueryMLServiceResultsRequest,
-    ) -> sls_20201230_models.QueryMLServiceResultsResponse:
-        """
-        @deprecated OpenAPI QueryMLServiceResults is deprecated
-
-        @summary queryMLServiceResults
-
-        @param request: QueryMLServiceResultsRequest
-        @return: QueryMLServiceResultsResponse
-        Deprecated
-        """
-        runtime = util_models.RuntimeOptions()
-        headers = {}
-        return self.query_mlservice_results_with_options(service_name, request, headers, runtime)
-
-    async def query_mlservice_results_async(
-        self,
-        service_name: str,
-        request: sls_20201230_models.QueryMLServiceResultsRequest,
-    ) -> sls_20201230_models.QueryMLServiceResultsResponse:
-        """
-        @deprecated OpenAPI QueryMLServiceResults is deprecated
-
-        @summary queryMLServiceResults
-
-        @param request: QueryMLServiceResultsRequest
-        @return: QueryMLServiceResultsResponse
-        Deprecated
-        """
-        runtime = util_models.RuntimeOptions()
-        headers = {}
-        return await self.query_mlservice_results_with_options_async(service_name, request, headers, runtime)
-
     def refresh_token_with_options(
         self,
         request: sls_20201230_models.RefreshTokenRequest,
@@ -18378,25 +18226,10 @@ class Client(OpenApiClient):
         UtilClient.validate_model(request)
         host_map = {}
         host_map['project'] = project
-        body = {}
-        if not UtilClient.is_unset(request.keys):
-            body['keys'] = request.keys
-        if not UtilClient.is_unset(request.line):
-            body['line'] = request.line
-        if not UtilClient.is_unset(request.log_reduce):
-            body['log_reduce'] = request.log_reduce
-        if not UtilClient.is_unset(request.log_reduce_black_list):
-            body['log_reduce_black_list'] = request.log_reduce_black_list
-        if not UtilClient.is_unset(request.log_reduce_white_list):
-            body['log_reduce_white_list'] = request.log_reduce_white_list
-        if not UtilClient.is_unset(request.max_text_len):
-            body['max_text_len'] = request.max_text_len
-        if not UtilClient.is_unset(request.ttl):
-            body['ttl'] = request.ttl
         req = open_api_models.OpenApiRequest(
             host_map=host_map,
             headers=headers,
-            body=OpenApiUtilClient.parse_to_map(body)
+            body=OpenApiUtilClient.parse_to_map(request.body)
         )
         params = open_api_models.Params(
             action='UpdateIndex',
@@ -18436,25 +18269,10 @@ class Client(OpenApiClient):
         UtilClient.validate_model(request)
         host_map = {}
         host_map['project'] = project
-        body = {}
-        if not UtilClient.is_unset(request.keys):
-            body['keys'] = request.keys
-        if not UtilClient.is_unset(request.line):
-            body['line'] = request.line
-        if not UtilClient.is_unset(request.log_reduce):
-            body['log_reduce'] = request.log_reduce
-        if not UtilClient.is_unset(request.log_reduce_black_list):
-            body['log_reduce_black_list'] = request.log_reduce_black_list
-        if not UtilClient.is_unset(request.log_reduce_white_list):
-            body['log_reduce_white_list'] = request.log_reduce_white_list
-        if not UtilClient.is_unset(request.max_text_len):
-            body['max_text_len'] = request.max_text_len
-        if not UtilClient.is_unset(request.ttl):
-            body['ttl'] = request.ttl
         req = open_api_models.OpenApiRequest(
             host_map=host_map,
             headers=headers,
-            body=OpenApiUtilClient.parse_to_map(body)
+            body=OpenApiUtilClient.parse_to_map(request.body)
         )
         params = open_api_models.Params(
             action='UpdateIndex',
alibabacloud_sls20201230/models.py

@@ -1344,6 +1344,148 @@ class Histogram(TeaModel):
         return self


+class IndexJsonKey(TeaModel):
+    def __init__(
+        self,
+        alias: str = None,
+        case_sensitive: bool = None,
+        chn: bool = None,
+        doc_value: bool = None,
+        token: List[str] = None,
+        type: str = None,
+    ):
+        self.alias = alias
+        self.case_sensitive = case_sensitive
+        self.chn = chn
+        self.doc_value = doc_value
+        self.token = token
+        # This parameter is required.
+        self.type = type
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.alias is not None:
+            result['alias'] = self.alias
+        if self.case_sensitive is not None:
+            result['caseSensitive'] = self.case_sensitive
+        if self.chn is not None:
+            result['chn'] = self.chn
+        if self.doc_value is not None:
+            result['doc_value'] = self.doc_value
+        if self.token is not None:
+            result['token'] = self.token
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('alias') is not None:
+            self.alias = m.get('alias')
+        if m.get('caseSensitive') is not None:
+            self.case_sensitive = m.get('caseSensitive')
+        if m.get('chn') is not None:
+            self.chn = m.get('chn')
+        if m.get('doc_value') is not None:
+            self.doc_value = m.get('doc_value')
+        if m.get('token') is not None:
+            self.token = m.get('token')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
+class IndexKey(TeaModel):
+    def __init__(
+        self,
+        alias: str = None,
+        case_sensitive: bool = None,
+        chn: bool = None,
+        doc_value: bool = None,
+        index_all: bool = None,
+        json_keys: Dict[str, IndexJsonKey] = None,
+        max_depth: int = None,
+        token: List[str] = None,
+        type: str = None,
+    ):
+        self.alias = alias
+        self.case_sensitive = case_sensitive
+        self.chn = chn
+        self.doc_value = doc_value
+        self.index_all = index_all
+        self.json_keys = json_keys
+        self.max_depth = max_depth
+        self.token = token
+        # This parameter is required.
+        self.type = type
+
+    def validate(self):
+        if self.json_keys:
+            for v in self.json_keys.values():
+                if v:
+                    v.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.alias is not None:
+            result['alias'] = self.alias
+        if self.case_sensitive is not None:
+            result['caseSensitive'] = self.case_sensitive
+        if self.chn is not None:
+            result['chn'] = self.chn
+        if self.doc_value is not None:
+            result['doc_value'] = self.doc_value
+        if self.index_all is not None:
+            result['index_all'] = self.index_all
+        result['json_keys'] = {}
+        if self.json_keys is not None:
+            for k, v in self.json_keys.items():
+                result['json_keys'][k] = v.to_map()
+        if self.max_depth is not None:
+            result['max_depth'] = self.max_depth
+        if self.token is not None:
+            result['token'] = self.token
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('alias') is not None:
+            self.alias = m.get('alias')
+        if m.get('caseSensitive') is not None:
+            self.case_sensitive = m.get('caseSensitive')
+        if m.get('chn') is not None:
+            self.chn = m.get('chn')
+        if m.get('doc_value') is not None:
+            self.doc_value = m.get('doc_value')
+        if m.get('index_all') is not None:
+            self.index_all = m.get('index_all')
+        self.json_keys = {}
+        if m.get('json_keys') is not None:
+            for k, v in m.get('json_keys').items():
+                temp_model = IndexJsonKey()
+                self.json_keys[k] = temp_model.from_map(v)
+        if m.get('max_depth') is not None:
+            self.max_depth = m.get('max_depth')
+        if m.get('token') is not None:
+            self.token = m.get('token')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
 class IngestProcessorConfiguration(TeaModel):
     def __init__(
         self,
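
Note (not part of the diff): a minimal sketch of the two models added above, using only field names that appear in this hunk. The round trip relies on the to_map()/from_map() pattern generated here; nested IndexJsonKey values are serialized and rebuilt by the loops shown above.

from alibabacloud_sls20201230 import models as sls_models

key = sls_models.IndexKey(
    type='json',           # 'type' is marked "This parameter is required." above
    index_all=True,
    max_depth=2,
    json_keys={
        'status': sls_models.IndexJsonKey(type='long', doc_value=True),
        'path': sls_models.IndexJsonKey(type='text', token=['/', '?']),
    },
)
serialized = key.to_map()                              # nested IndexJsonKey values go through to_map()
restored = sls_models.IndexKey().from_map(serialized)  # from_map() rebuilds IndexJsonKey instances
assert restored.json_keys['status'].type == 'long'
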
@@ -3789,85 +3931,22 @@ class IndexLine(TeaModel):
         return self


-class IndexKeysValue(TeaModel):
-    def __init__(
-        self,
-        chn: bool = None,
-        case_sensitive: bool = None,
-        token: List[str] = None,
-        alias: str = None,
-        type: str = None,
-        doc_value: bool = None,
-    ):
-        self.chn = chn
-        self.case_sensitive = case_sensitive
-        self.token = token
-        self.alias = alias
-        # This parameter is required.
-        self.type = type
-        self.doc_value = doc_value
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.token is not None:
-            result['token'] = self.token
-        if self.alias is not None:
-            result['alias'] = self.alias
-        if self.type is not None:
-            result['type'] = self.type
-        if self.doc_value is not None:
-            result['doc_value'] = self.doc_value
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        if m.get('alias') is not None:
-            self.alias = m.get('alias')
-        if m.get('type') is not None:
-            self.type = m.get('type')
-        if m.get('doc_value') is not None:
-            self.doc_value = m.get('doc_value')
-        return self
-
-
 class Index(TeaModel):
     def __init__(
         self,
-        keys: Dict[str, IndexKeysValue] = None,
-        last_modify_time: int = None,
+        keys: Dict[str, IndexKey] = None,
         line: IndexLine = None,
         log_reduce: bool = None,
         log_reduce_black_list: List[str] = None,
         log_reduce_white_list: List[str] = None,
         max_text_len: int = None,
-        ttl: int = None,
     ):
         self.keys = keys
-        self.last_modify_time = last_modify_time
         self.line = line
         self.log_reduce = log_reduce
         self.log_reduce_black_list = log_reduce_black_list
         self.log_reduce_white_list = log_reduce_white_list
         self.max_text_len = max_text_len
-        # This parameter is required.
-        self.ttl = ttl

     def validate(self):
         if self.keys:
@@ -3887,8 +3966,6 @@ class Index(TeaModel):
         if self.keys is not None:
             for k, v in self.keys.items():
                 result['keys'][k] = v.to_map()
-        if self.last_modify_time is not None:
-            result['lastModifyTime'] = self.last_modify_time
         if self.line is not None:
             result['line'] = self.line.to_map()
         if self.log_reduce is not None:
@@ -3899,8 +3976,6 @@ class Index(TeaModel):
             result['log_reduce_white_list'] = self.log_reduce_white_list
         if self.max_text_len is not None:
             result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
         return result

     def from_map(self, m: dict = None):
@@ -3908,10 +3983,8 @@ class Index(TeaModel):
         self.keys = {}
         if m.get('keys') is not None:
             for k, v in m.get('keys').items():
-                temp_model = IndexKeysValue()
+                temp_model = IndexKey()
                 self.keys[k] = temp_model.from_map(v)
-        if m.get('lastModifyTime') is not None:
-            self.last_modify_time = m.get('lastModifyTime')
         if m.get('line') is not None:
             temp_model = IndexLine()
             self.line = temp_model.from_map(m['line'])
@@ -3923,8 +3996,6 @@ class Index(TeaModel):
         self.log_reduce_white_list = m.get('log_reduce_white_list')
         if m.get('max_text_len') is not None:
             self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
         return self

@@ -4429,89 +4500,6 @@ class Shard(TeaModel):
         return self


-class KeysValue(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        type: str = None,
-        alias: str = None,
-        token: List[str] = None,
-        doc_value: bool = None,
-        vector_index: str = None,
-        embedding: str = None,
-    ):
-        # Specifies whether to enable case sensitivity. This parameter is required only when **type** is set to **text**. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. This parameter is required only when **type** is set to **text**. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.chn = chn
-        # The data type of the field value. Valid values: text, json, double, and long.
-        #
-        # This parameter is required.
-        self.type = type
-        # The alias of the field.
-        self.alias = alias
-        # The delimiters that are used to split text.
-        self.token = token
-        # Specifies whether to turn on Enable Analytics for the field.
-        self.doc_value = doc_value
-        self.vector_index = vector_index
-        self.embedding = embedding
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.type is not None:
-            result['type'] = self.type
-        if self.alias is not None:
-            result['alias'] = self.alias
-        if self.token is not None:
-            result['token'] = self.token
-        if self.doc_value is not None:
-            result['doc_value'] = self.doc_value
-        if self.vector_index is not None:
-            result['vector_index'] = self.vector_index
-        if self.embedding is not None:
-            result['embedding'] = self.embedding
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('type') is not None:
-            self.type = m.get('type')
-        if m.get('alias') is not None:
-            self.alias = m.get('alias')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        if m.get('doc_value') is not None:
-            self.doc_value = m.get('doc_value')
-        if m.get('vector_index') is not None:
-            self.vector_index = m.get('vector_index')
-        if m.get('embedding') is not None:
-            self.embedding = m.get('embedding')
-        return self
-
-
 class ApplyConfigToMachineGroupResponse(TeaModel):
     def __init__(
         self,
@@ -5629,148 +5617,33 @@ class CreateETLResponse(TeaModel):
         return self


-class CreateIndexRequestLine(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        exclude_keys: List[str] = None,
-        include_keys: List[str] = None,
-        token: List[str] = None,
-    ):
-        # Specifies whether to enable case sensitivity. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. Valid values:
-        #
-        # * true
-        # * false (default)
-        self.chn = chn
-        # The excluded fields. You cannot specify both include_keys and exclude_keys.
-        self.exclude_keys = exclude_keys
-        # The included fields. You cannot specify both include_keys and exclude_keys.
-        self.include_keys = include_keys
-        # The delimiters. You can specify a delimiter to delimit the content of a field value. For more information about delimiters, see Example.
-        #
-        # This parameter is required.
-        self.token = token
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.exclude_keys is not None:
-            result['exclude_keys'] = self.exclude_keys
-        if self.include_keys is not None:
-            result['include_keys'] = self.include_keys
-        if self.token is not None:
-            result['token'] = self.token
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('exclude_keys') is not None:
-            self.exclude_keys = m.get('exclude_keys')
-        if m.get('include_keys') is not None:
-            self.include_keys = m.get('include_keys')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        return self
-
-
 class CreateIndexRequest(TeaModel):
     def __init__(
         self,
-        keys: Dict[str, KeysValue] = None,
-        line: CreateIndexRequestLine = None,
-        log_reduce: bool = None,
-        log_reduce_black_list: List[str] = None,
-        log_reduce_white_list: List[str] = None,
-        max_text_len: int = None,
-        ttl: int = None,
+        body: Index = None,
     ):
-        # The
-        self.keys = keys
-        # The configuration of full-text indexes. You must specify this parameter, the keys parameter, or both parameters. For more information, see Example.
-        self.line = line
-        # Specifies whether to turn on LogReduce. After you turn on LogReduce, either the whitelist or blacklist takes effect.
-        self.log_reduce = log_reduce
-        # The fields in the blacklist that you want to use to cluster logs.
-        self.log_reduce_black_list = log_reduce_black_list
-        # The fields in the whitelist that you want to use to cluster logs.
-        self.log_reduce_white_list = log_reduce_white_list
-        # The maximum length of a field value that can be retained. Default value: 2048. Unit: bytes. The default value is equal to 2 KB. You can change the value of max_text_len. Valid values: 64 to 16384.
-        self.max_text_len = max_text_len
-        # The retention period of logs. Unit: days. Valid values: 7, 30, and 90.
-        self.ttl = ttl
+        # The request body.
+        self.body = body

     def validate(self):
-        if self.keys:
-            for v in self.keys.values():
-                if v:
-                    v.validate()
-        if self.line:
-            self.line.validate()
+        if self.body:
+            self.body.validate()

     def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        result['keys'] = {}
-        if self.keys is not None:
-            for k, v in self.keys.items():
-                result['keys'][k] = v.to_map()
-        if self.line is not None:
-            result['line'] = self.line.to_map()
-        if self.log_reduce is not None:
-            result['log_reduce'] = self.log_reduce
-        if self.log_reduce_black_list is not None:
-            result['log_reduce_black_list'] = self.log_reduce_black_list
-        if self.log_reduce_white_list is not None:
-            result['log_reduce_white_list'] = self.log_reduce_white_list
-        if self.max_text_len is not None:
-            result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.body is not None:
+            result['body'] = self.body.to_map()
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-        self.keys = {}
-        if m.get('keys') is not None:
-            for k, v in m.get('keys').items():
-                temp_model = KeysValue()
-                self.keys[k] = temp_model.from_map(v)
-        if m.get('line') is not None:
-            temp_model = CreateIndexRequestLine()
-            self.line = temp_model.from_map(m['line'])
-        if m.get('log_reduce') is not None:
-            self.log_reduce = m.get('log_reduce')
-        if m.get('log_reduce_black_list') is not None:
-            self.log_reduce_black_list = m.get('log_reduce_black_list')
-        if m.get('log_reduce_white_list') is not None:
-            self.log_reduce_white_list = m.get('log_reduce_white_list')
-        if m.get('max_text_len') is not None:
-            self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
+        if m.get('body') is not None:
+            temp_model = Index()
+            self.body = temp_model.from_map(m['body'])
         return self

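
Migration note (not part of the diff): in 5.4.0 the index settings sat directly on CreateIndexRequest (keys, line, ttl, and so on); in 5.4.1 they move into an Index object passed as `body`, and `ttl` is no longer part of the request-side Index model shown above. A hedged before/after sketch follows; the old style is kept as comments, and the IndexLine field names are assumed to mirror the removed CreateIndexRequestLine.

from alibabacloud_sls20201230 import models as sls_models

# 5.4.0 style (removed in this release):
# request = sls_models.CreateIndexRequest(
#     line=sls_models.CreateIndexRequestLine(token=[',', ' ']),
#     max_text_len=2048,
#     ttl=30,
# )

# 5.4.1 style:
request = sls_models.CreateIndexRequest(
    body=sls_models.Index(
        line=sls_models.IndexLine(token=[',', ' ']),
        max_text_len=2048,
    )
)
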
@@ -10516,6 +10389,8 @@ class GetIndexResponseBodyLine(TeaModel):
         # The included fields.
         self.include_keys = include_keys
         # The delimiters.
+        #
+        # This parameter is required.
         self.token = token

     def validate(self):
@@ -10558,7 +10433,7 @@ class GetIndexResponseBody(TeaModel):
     def __init__(
         self,
         index_mode: str = None,
-        keys: Dict[str, KeysValue] = None,
+        keys: Dict[str, IndexKey] = None,
         last_modify_time: int = None,
         line: GetIndexResponseBodyLine = None,
         log_reduce: bool = None,
@@ -10587,6 +10462,8 @@ class GetIndexResponseBody(TeaModel):
         # The storage type. The value is fixed as pg.
         self.storage = storage
         # The lifecycle of the index file. Valid values: 7, 30, and 90. Unit: day.
+        #
+        # This parameter is required.
         self.ttl = ttl

     def validate(self):
@@ -10634,7 +10511,7 @@ class GetIndexResponseBody(TeaModel):
         self.keys = {}
         if m.get('keys') is not None:
             for k, v in m.get('keys').items():
-                temp_model = KeysValue()
+                temp_model = IndexKey()
                 self.keys[k] = temp_model.from_map(v)
         if m.get('lastModifyTime') is not None:
             self.last_modify_time = m.get('lastModifyTime')
@@ -16813,115 +16690,6 @@ class PutWebtrackingResponse(TeaModel):
         return self


-class QueryMLServiceResultsRequest(TeaModel):
-    def __init__(
-        self,
-        allow_builtin: bool = None,
-        body: MLServiceAnalysisParam = None,
-    ):
-        self.allow_builtin = allow_builtin
-        self.body = body
-
-    def validate(self):
-        if self.body:
-            self.body.validate()
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.allow_builtin is not None:
-            result['allowBuiltin'] = self.allow_builtin
-        if self.body is not None:
-            result['body'] = self.body.to_map()
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('allowBuiltin') is not None:
-            self.allow_builtin = m.get('allowBuiltin')
-        if m.get('body') is not None:
-            temp_model = MLServiceAnalysisParam()
-            self.body = temp_model.from_map(m['body'])
-        return self
-
-
-class QueryMLServiceResultsResponseBody(TeaModel):
-    def __init__(
-        self,
-        data: List[Dict[str, str]] = None,
-        status: Dict[str, str] = None,
-    ):
-        self.data = data
-        self.status = status
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.data is not None:
-            result['data'] = self.data
-        if self.status is not None:
-            result['status'] = self.status
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('data') is not None:
-            self.data = m.get('data')
-        if m.get('status') is not None:
-            self.status = m.get('status')
-        return self
-
-
-class QueryMLServiceResultsResponse(TeaModel):
-    def __init__(
-        self,
-        headers: Dict[str, str] = None,
-        status_code: int = None,
-        body: QueryMLServiceResultsResponseBody = None,
-    ):
-        self.headers = headers
-        self.status_code = status_code
-        self.body = body
-
-    def validate(self):
-        if self.body:
-            self.body.validate()
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.headers is not None:
-            result['headers'] = self.headers
-        if self.status_code is not None:
-            result['statusCode'] = self.status_code
-        if self.body is not None:
-            result['body'] = self.body.to_map()
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('headers') is not None:
-            self.headers = m.get('headers')
-        if m.get('statusCode') is not None:
-            self.status_code = m.get('statusCode')
-        if m.get('body') is not None:
-            temp_model = QueryMLServiceResultsResponseBody()
-            self.body = temp_model.from_map(m['body'])
-        return self
-
-
 class RefreshTokenRequest(TeaModel):
     def __init__(
         self,
@@ -18168,109 +17936,17 @@ class UpdateETLResponse(TeaModel):
         return self


-class UpdateIndexRequestLine(TeaModel):
-    def __init__(
-        self,
-        case_sensitive: bool = None,
-        chn: bool = None,
-        exclude_keys: List[str] = None,
-        include_keys: List[str] = None,
-        token: List[str] = None,
-    ):
-        # Specifies whether to enable case sensitivity. Valid values:
-        #
-        # * true
-        # * false
-        #
-        # This parameter is required.
-        self.case_sensitive = case_sensitive
-        # Specifies whether to include Chinese characters. Valid values:
-        #
-        # * true
-        # * false
-        #
-        # This parameter is required.
-        self.chn = chn
-        # The excluded fields. You cannot specify both include_keys and exclude_keys.
-        self.exclude_keys = exclude_keys
-        # The included fields. You cannot specify both include_keys and exclude_keys.
-        self.include_keys = include_keys
-        # The delimiters that are used to split text.
-        #
-        # This parameter is required.
-        self.token = token
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.case_sensitive is not None:
-            result['caseSensitive'] = self.case_sensitive
-        if self.chn is not None:
-            result['chn'] = self.chn
-        if self.exclude_keys is not None:
-            result['exclude_keys'] = self.exclude_keys
-        if self.include_keys is not None:
-            result['include_keys'] = self.include_keys
-        if self.token is not None:
-            result['token'] = self.token
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('caseSensitive') is not None:
-            self.case_sensitive = m.get('caseSensitive')
-        if m.get('chn') is not None:
-            self.chn = m.get('chn')
-        if m.get('exclude_keys') is not None:
-            self.exclude_keys = m.get('exclude_keys')
-        if m.get('include_keys') is not None:
-            self.include_keys = m.get('include_keys')
-        if m.get('token') is not None:
-            self.token = m.get('token')
-        return self
-
-
 class UpdateIndexRequest(TeaModel):
     def __init__(
         self,
-        keys: Dict[str, KeysValue] = None,
-        line: UpdateIndexRequestLine = None,
-        log_reduce: bool = None,
-        log_reduce_black_list: List[str] = None,
-        log_reduce_white_list: List[str] = None,
-        max_text_len: int = None,
-        ttl: int = None,
+        body: Index = None,
     ):
-        # The
-        self.keys = keys
-        # The configuration of full-text indexes.
-        self.line = line
-        # Specifies whether to turn on LogReduce. If you turn on LogReduce, only one of `log_reduce_white_list` and `log_reduce_black_list` takes effect.
-        self.log_reduce = log_reduce
-        # The fields in the blacklist that you want to use to cluster logs.
-        self.log_reduce_black_list = log_reduce_black_list
-        # The fields in the whitelist that you want to use to cluster logs.
-        self.log_reduce_white_list = log_reduce_white_list
-        # The maximum length of a field value that can be retained.
-        self.max_text_len = max_text_len
-        # The retention period of data. Unit: days. Valid values: 7, 30, and 90.
-        #
-        # This parameter is required.
-        self.ttl = ttl
+        # The request body.
+        self.body = body

     def validate(self):
-        if self.keys:
-            for v in self.keys.values():
-                if v:
-                    v.validate()
-        if self.line:
-            self.line.validate()
+        if self.body:
+            self.body.validate()

     def to_map(self):
         _map = super().to_map()
@@ -18278,44 +17954,15 @@ class UpdateIndexRequest(TeaModel):
             return _map

         result = dict()
-        result['keys'] = {}
-        if self.keys is not None:
-            for k, v in self.keys.items():
-                result['keys'][k] = v.to_map()
-        if self.line is not None:
-            result['line'] = self.line.to_map()
-        if self.log_reduce is not None:
-            result['log_reduce'] = self.log_reduce
-        if self.log_reduce_black_list is not None:
-            result['log_reduce_black_list'] = self.log_reduce_black_list
-        if self.log_reduce_white_list is not None:
-            result['log_reduce_white_list'] = self.log_reduce_white_list
-        if self.max_text_len is not None:
-            result['max_text_len'] = self.max_text_len
-        if self.ttl is not None:
-            result['ttl'] = self.ttl
+        if self.body is not None:
+            result['body'] = self.body.to_map()
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
-        self.keys = {}
-        if m.get('keys') is not None:
-            for k, v in m.get('keys').items():
-                temp_model = KeysValue()
-                self.keys[k] = temp_model.from_map(v)
-        if m.get('line') is not None:
-            temp_model = UpdateIndexRequestLine()
-            self.line = temp_model.from_map(m['line'])
-        if m.get('log_reduce') is not None:
-            self.log_reduce = m.get('log_reduce')
-        if m.get('log_reduce_black_list') is not None:
-            self.log_reduce_black_list = m.get('log_reduce_black_list')
-        if m.get('log_reduce_white_list') is not None:
-            self.log_reduce_white_list = m.get('log_reduce_white_list')
-        if m.get('max_text_len') is not None:
-            self.max_text_len = m.get('max_text_len')
-        if m.get('ttl') is not None:
-            self.ttl = m.get('ttl')
+        if m.get('body') is not None:
+            temp_model = Index()
+            self.body = temp_model.from_map(m['body'])
         return self

alibabacloud_sls20201230-5.4.1.dist-info/RECORD

@@ -0,0 +1,8 @@
+alibabacloud_sls20201230/__init__.py,sha256=IFCzO7T7-atjZukftRuL7th6R9EAwZ_4jp3XkZRsaTw,21
+alibabacloud_sls20201230/client.py,sha256=r8Uf5qSsteZ84NnK6ktandv9GooKBMNHYvp9SZ5ezXI,894975
+alibabacloud_sls20201230/models.py,sha256=GFGqJHrhecwfhmyjWvFuBuPcIsroWCiGW9Ivf6b3FIo,636241
+alibabacloud_sls20201230-5.4.1.dist-info/LICENSE,sha256=0CFItL6bHvxqS44T6vlLoW2R4Zaic304OO3WxN0oXF0,600
+alibabacloud_sls20201230-5.4.1.dist-info/METADATA,sha256=OW_1Nvcok3eslV7XJj2I0qmwNZwt2TNaVn0y4TW_g2w,2318
+alibabacloud_sls20201230-5.4.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+alibabacloud_sls20201230-5.4.1.dist-info/top_level.txt,sha256=_m_h-kYhApsSWSuotU7YSM9-BDg5IBt6N8Tw-HaO3zU,25
+alibabacloud_sls20201230-5.4.1.dist-info/RECORD,,
alibabacloud_sls20201230-5.4.0.dist-info/RECORD

@@ -1,8 +0,0 @@
-alibabacloud_sls20201230/__init__.py,sha256=N73NFquXZBMYzhRbXrrMstSjprvoaugcSV1Q9V7xv1U,21
-alibabacloud_sls20201230/client.py,sha256=PcaDwCeGC_rNhbk-QYCrpZSGB_n0D2Zr_OUno8PY9v4,902513
-alibabacloud_sls20201230/models.py,sha256=Lly-w-B52XKD4b348HEgGi_uZ3F4fDFx-M9b9z5BGEs,649993
-alibabacloud_sls20201230-5.4.0.dist-info/LICENSE,sha256=0CFItL6bHvxqS44T6vlLoW2R4Zaic304OO3WxN0oXF0,600
-alibabacloud_sls20201230-5.4.0.dist-info/METADATA,sha256=VzYHh7pMNvSrin6F5h8cjc48YWr1SnQF6GtG-95LCc0,2318
-alibabacloud_sls20201230-5.4.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-alibabacloud_sls20201230-5.4.0.dist-info/top_level.txt,sha256=_m_h-kYhApsSWSuotU7YSM9-BDg5IBt6N8Tw-HaO3zU,25
-alibabacloud_sls20201230-5.4.0.dist-info/RECORD,,
{alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/LICENSE
RENAMED (file without changes)

{alibabacloud_sls20201230-5.4.0.dist-info → alibabacloud_sls20201230-5.4.1.dist-info}/top_level.txt
RENAMED (file without changes)