alibabacloud-quanmiaolightapp20240801 1.4.1.tar.gz → 2.0.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/ChangeLog.md +11 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/PKG-INFO +1 -1
- alibabacloud_quanmiaolightapp20240801-2.0.1/alibabacloud_quanmiaolightapp20240801/__init__.py +1 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801/client.py +8 -120
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801/models.py +142 -272
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/PKG-INFO +1 -1
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/setup.py +1 -1
- alibabacloud_quanmiaolightapp20240801-1.4.1/alibabacloud_quanmiaolightapp20240801/__init__.py +0 -1
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/LICENSE +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/MANIFEST.in +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/README-CN.md +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/README.md +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/SOURCES.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/dependency_links.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/requires.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/top_level.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/setup.cfg +0 -0
{alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/ChangeLog.md
RENAMED
@@ -1,3 +1,14 @@
+2024-12-05 Version: 2.0.0
+- Delete API RunCommentGeneration.
+- Update API RunHotTopicChat: update response param.
+
+
+2024-11-06 Version: 1.4.1
+- Update API RunMarketingInformationWriting: add param customLimitation.
+- Update API RunMarketingInformationWriting: add param inputExample.
+- Update API RunMarketingInformationWriting: add param outputExample.
+
+
 2024-11-01 Version: 1.4.0
 - Support API RunHotTopicChat.
 - Support API RunHotTopicSummary.
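The 2.0.0 entry is a breaking change: RunCommentGeneration is removed outright, while RunHotTopicChat gains request and response fields (detailed in the client.py and models.py hunks below). The short Python sketches interleaved with the hunks below all assume a client constructed in the usual way for Alibaba Cloud Python SDKs; the endpoint and credential values are illustrative assumptions, not something this diff specifies.

# Hypothetical setup sketch (not part of this diff): build the generated client.
# The endpoint and credentials are assumed placeholders; adjust for your environment.
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_quanmiaolightapp20240801.client import Client

config = open_api_models.Config(
    access_key_id='<your-access-key-id>',
    access_key_secret='<your-access-key-secret>',
    endpoint='quanmiaolightapp.cn-beijing.aliyuncs.com',  # assumed endpoint
)
client = Client(config)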
{alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud_quanmiaolightapp20240801
-Version: 1.4.1
+Version: 2.0.1
 Summary: Alibaba Cloud QuanMiaoLightApp (20240801) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
alibabacloud_quanmiaolightapp20240801-2.0.1/alibabacloud_quanmiaolightapp20240801/__init__.py
ADDED
@@ -0,0 +1 @@
+__version__ = '2.0.1'
{alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801/client.py
RENAMED
@@ -273,126 +273,6 @@ class Client(OpenApiClient):
         headers = {}
         return await self.list_hot_topic_summaries_with_options_async(workspace_id, request, headers, runtime)
 
-    def run_comment_generation_with_options(
-        self,
-        workspace_id: str,
-        request: quan_miao_light_app_20240801_models.RunCommentGenerationRequest,
-        headers: Dict[str, str],
-        runtime: util_models.RuntimeOptions,
-    ) -> quan_miao_light_app_20240801_models.RunCommentGenerationResponse:
-        """
-        @summary 评论生成服务
-
-        @param request: RunCommentGenerationRequest
-        @param headers: map
-        @param runtime: runtime options for this request RuntimeOptions
-        @return: RunCommentGenerationResponse
-        """
-        UtilClient.validate_model(request)
-        body = {}
-        if not UtilClient.is_unset(request.length):
-            body['length'] = request.length
-        if not UtilClient.is_unset(request.num_comments):
-            body['numComments'] = request.num_comments
-        if not UtilClient.is_unset(request.source_material):
-            body['sourceMaterial'] = request.source_material
-        if not UtilClient.is_unset(request.style):
-            body['style'] = request.style
-        req = open_api_models.OpenApiRequest(
-            headers=headers,
-            body=OpenApiUtilClient.parse_to_map(body)
-        )
-        params = open_api_models.Params(
-            action='RunCommentGeneration',
-            version='2024-08-01',
-            protocol='HTTPS',
-            pathname=f'/{OpenApiUtilClient.get_encode_param(workspace_id)}/quanmiao/lightapp/runCommentGeneration',
-            method='POST',
-            auth_type='AK',
-            style='ROA',
-            req_body_type='formData',
-            body_type='json'
-        )
-        return TeaCore.from_map(
-            quan_miao_light_app_20240801_models.RunCommentGenerationResponse(),
-            self.call_api(params, req, runtime)
-        )
-
-    async def run_comment_generation_with_options_async(
-        self,
-        workspace_id: str,
-        request: quan_miao_light_app_20240801_models.RunCommentGenerationRequest,
-        headers: Dict[str, str],
-        runtime: util_models.RuntimeOptions,
-    ) -> quan_miao_light_app_20240801_models.RunCommentGenerationResponse:
-        """
-        @summary 评论生成服务
-
-        @param request: RunCommentGenerationRequest
-        @param headers: map
-        @param runtime: runtime options for this request RuntimeOptions
-        @return: RunCommentGenerationResponse
-        """
-        UtilClient.validate_model(request)
-        body = {}
-        if not UtilClient.is_unset(request.length):
-            body['length'] = request.length
-        if not UtilClient.is_unset(request.num_comments):
-            body['numComments'] = request.num_comments
-        if not UtilClient.is_unset(request.source_material):
-            body['sourceMaterial'] = request.source_material
-        if not UtilClient.is_unset(request.style):
-            body['style'] = request.style
-        req = open_api_models.OpenApiRequest(
-            headers=headers,
-            body=OpenApiUtilClient.parse_to_map(body)
-        )
-        params = open_api_models.Params(
-            action='RunCommentGeneration',
-            version='2024-08-01',
-            protocol='HTTPS',
-            pathname=f'/{OpenApiUtilClient.get_encode_param(workspace_id)}/quanmiao/lightapp/runCommentGeneration',
-            method='POST',
-            auth_type='AK',
-            style='ROA',
-            req_body_type='formData',
-            body_type='json'
-        )
-        return TeaCore.from_map(
-            quan_miao_light_app_20240801_models.RunCommentGenerationResponse(),
-            await self.call_api_async(params, req, runtime)
-        )
-
-    def run_comment_generation(
-        self,
-        workspace_id: str,
-        request: quan_miao_light_app_20240801_models.RunCommentGenerationRequest,
-    ) -> quan_miao_light_app_20240801_models.RunCommentGenerationResponse:
-        """
-        @summary 评论生成服务
-
-        @param request: RunCommentGenerationRequest
-        @return: RunCommentGenerationResponse
-        """
-        runtime = util_models.RuntimeOptions()
-        headers = {}
-        return self.run_comment_generation_with_options(workspace_id, request, headers, runtime)
-
-    async def run_comment_generation_async(
-        self,
-        workspace_id: str,
-        request: quan_miao_light_app_20240801_models.RunCommentGenerationRequest,
-    ) -> quan_miao_light_app_20240801_models.RunCommentGenerationResponse:
-        """
-        @summary 评论生成服务
-
-        @param request: RunCommentGenerationRequest
-        @return: RunCommentGenerationResponse
-        """
-        runtime = util_models.RuntimeOptions()
-        headers = {}
-        return await self.run_comment_generation_with_options_async(workspace_id, request, headers, runtime)
-
     def run_hot_topic_chat_with_options(
         self,
         workspace_id: str,
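The hunk above deletes the entire RunCommentGeneration surface from the client: the sync and async *_with_options methods and both convenience wrappers. Any 1.4.x call site therefore fails with an AttributeError after upgrading. A minimal, purely illustrative pre-flight check, reusing the client from the setup sketch above:

# Illustrative migration check (not part of the SDK): quanmiaolightapp 2.0.0
# removes run_comment_generation entirely, so 1.4.x call sites break on upgrade.
if not hasattr(client, 'run_comment_generation'):
    raise RuntimeError(
        'RunCommentGeneration was removed in quanmiaolightapp 2.0.0; '
        'migrate this call site before upgrading'
    )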
@@ -415,6 +295,8 @@ class Client(OpenApiClient):
             request.generate_options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.generate_options, 'generateOptions', 'json')
         if not UtilClient.is_unset(tmp_req.hot_topics):
             request.hot_topics_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.hot_topics, 'hotTopics', 'json')
+        if not UtilClient.is_unset(tmp_req.messages):
+            request.messages_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.messages, 'messages', 'json')
         if not UtilClient.is_unset(tmp_req.step_for_broadcast_content_config):
             request.step_for_broadcast_content_config_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.step_for_broadcast_content_config, 'stepForBroadcastContentConfig', 'json')
         body = {}
@@ -428,6 +310,8 @@ class Client(OpenApiClient):
             body['hotTopics'] = request.hot_topics_shrink
         if not UtilClient.is_unset(request.image_count):
             body['imageCount'] = request.image_count
+        if not UtilClient.is_unset(request.messages_shrink):
+            body['messages'] = request.messages_shrink
         if not UtilClient.is_unset(request.model_custom_prompt_template):
             body['modelCustomPromptTemplate'] = request.model_custom_prompt_template
         if not UtilClient.is_unset(request.model_id):
@@ -482,6 +366,8 @@ class Client(OpenApiClient):
             request.generate_options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.generate_options, 'generateOptions', 'json')
         if not UtilClient.is_unset(tmp_req.hot_topics):
             request.hot_topics_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.hot_topics, 'hotTopics', 'json')
+        if not UtilClient.is_unset(tmp_req.messages):
+            request.messages_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.messages, 'messages', 'json')
         if not UtilClient.is_unset(tmp_req.step_for_broadcast_content_config):
             request.step_for_broadcast_content_config_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.step_for_broadcast_content_config, 'stepForBroadcastContentConfig', 'json')
         body = {}
@@ -495,6 +381,8 @@ class Client(OpenApiClient):
             body['hotTopics'] = request.hot_topics_shrink
         if not UtilClient.is_unset(request.image_count):
             body['imageCount'] = request.image_count
+        if not UtilClient.is_unset(request.messages_shrink):
+            body['messages'] = request.messages_shrink
         if not UtilClient.is_unset(request.model_custom_prompt_template):
             body['modelCustomPromptTemplate'] = request.model_custom_prompt_template
         if not UtilClient.is_unset(request.model_id):
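The four hunks above thread the new messages field of RunHotTopicChat through both the sync and async call paths: the list of message models is shrunk to a JSON string and sent as the messages form field. A hedged sketch of a 2.0.x call that uses it, continuing the setup sketch above; the model id, workspace id, and message text are illustrative assumptions, and the plain run_hot_topic_chat wrapper is assumed to exist alongside the *_with_options variants shown here:

# Hedged usage sketch for the new `messages` field on RunHotTopicChatRequest.
from alibabacloud_quanmiaolightapp20240801 import models as quanmiao_models

request = quanmiao_models.RunHotTopicChatRequest(
    model_id='qwen-max',  # assumed model id
    messages=[
        quanmiao_models.RunHotTopicChatRequestMessages(
            role='user',
            content='Draft a broadcast script for this trending topic.',
        ),
    ],
)
# Assumed convenience wrapper; it should delegate to run_hot_topic_chat_with_options
# with empty headers and default RuntimeOptions, as the other wrappers in this client do.
response = client.run_hot_topic_chat('<workspace-id>', request)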
{alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801/models.py
RENAMED
@@ -741,145 +741,16 @@ class ListHotTopicSummariesResponse(TeaModel):
         return self
 
 
-class RunCommentGenerationRequest(TeaModel):
+class RunHotTopicChatRequestMessages(TeaModel):
     def __init__(
         self,
-        length: int = None,
-        num_comments: int = None,
-        source_material: str = None,
-        style: str = None,
-    ):
-        self.length = length
-        self.num_comments = num_comments
-        self.source_material = source_material
-        self.style = style
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.length is not None:
-            result['length'] = self.length
-        if self.num_comments is not None:
-            result['numComments'] = self.num_comments
-        if self.source_material is not None:
-            result['sourceMaterial'] = self.source_material
-        if self.style is not None:
-            result['style'] = self.style
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('length') is not None:
-            self.length = m.get('length')
-        if m.get('numComments') is not None:
-            self.num_comments = m.get('numComments')
-        if m.get('sourceMaterial') is not None:
-            self.source_material = m.get('sourceMaterial')
-        if m.get('style') is not None:
-            self.style = m.get('style')
-        return self
-
-
-class RunCommentGenerationResponseBodyHeader(TeaModel):
-    def __init__(
-        self,
-        event: str = None,
-        event_info: str = None,
-        request_id: str = None,
-        session_id: str = None,
-        task_id: str = None,
-        trace_id: str = None,
-    ):
-        self.event = event
-        self.event_info = event_info
-        self.request_id = request_id
-        self.session_id = session_id
-        self.task_id = task_id
-        self.trace_id = trace_id
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.event is not None:
-            result['event'] = self.event
-        if self.event_info is not None:
-            result['eventInfo'] = self.event_info
-        if self.request_id is not None:
-            result['requestId'] = self.request_id
-        if self.session_id is not None:
-            result['sessionId'] = self.session_id
-        if self.task_id is not None:
-            result['taskId'] = self.task_id
-        if self.trace_id is not None:
-            result['traceId'] = self.trace_id
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('event') is not None:
-            self.event = m.get('event')
-        if m.get('eventInfo') is not None:
-            self.event_info = m.get('eventInfo')
-        if m.get('requestId') is not None:
-            self.request_id = m.get('requestId')
-        if m.get('sessionId') is not None:
-            self.session_id = m.get('sessionId')
-        if m.get('taskId') is not None:
-            self.task_id = m.get('taskId')
-        if m.get('traceId') is not None:
-            self.trace_id = m.get('traceId')
-        return self
-
-
-class RunCommentGenerationResponseBodyPayloadOutput(TeaModel):
-    def __init__(
-        self,
-        text: str = None,
-    ):
-        self.text = text
-
-    def validate(self):
-        pass
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.text is not None:
-            result['text'] = self.text
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('text') is not None:
-            self.text = m.get('text')
-        return self
-
-
-class RunCommentGenerationResponseBodyPayloadUsage(TeaModel):
-    def __init__(
-        self,
-        input_tokens: int = None,
-        output_tokens: int = None,
-        total_tokens: int = None,
+        content: str = None,
+        create_time: str = None,
+        role: str = None,
     ):
-        self.input_tokens = input_tokens
-        self.output_tokens = output_tokens
-        self.total_tokens = total_tokens
+        self.content = content
+        self.create_time = create_time
+        self.role = role
 
     def validate(self):
         pass
@@ -890,145 +761,22 @@ class RunCommentGenerationResponseBodyPayloadUsage(TeaModel):
             return _map
 
         result = dict()
-        if self.input_tokens is not None:
-            result['inputTokens'] = self.input_tokens
-        if self.output_tokens is not None:
-            result['outputTokens'] = self.output_tokens
-        if self.total_tokens is not None:
-            result['totalTokens'] = self.total_tokens
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('inputTokens') is not None:
-            self.input_tokens = m.get('inputTokens')
-        if m.get('outputTokens') is not None:
-            self.output_tokens = m.get('outputTokens')
-        if m.get('totalTokens') is not None:
-            self.total_tokens = m.get('totalTokens')
-        return self
-
-
-class RunCommentGenerationResponseBodyPayload(TeaModel):
-    def __init__(
-        self,
-        output: RunCommentGenerationResponseBodyPayloadOutput = None,
-        usage: RunCommentGenerationResponseBodyPayloadUsage = None,
-    ):
-        self.output = output
-        self.usage = usage
-
-    def validate(self):
-        if self.output:
-            self.output.validate()
-        if self.usage:
-            self.usage.validate()
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.output is not None:
-            result['output'] = self.output.to_map()
-        if self.usage is not None:
-            result['usage'] = self.usage.to_map()
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('output') is not None:
-            temp_model = RunCommentGenerationResponseBodyPayloadOutput()
-            self.output = temp_model.from_map(m['output'])
-        if m.get('usage') is not None:
-            temp_model = RunCommentGenerationResponseBodyPayloadUsage()
-            self.usage = temp_model.from_map(m['usage'])
-        return self
-
-
-class RunCommentGenerationResponseBody(TeaModel):
-    def __init__(
-        self,
-        end: bool = None,
-        header: RunCommentGenerationResponseBodyHeader = None,
-        payload: RunCommentGenerationResponseBodyPayload = None,
-    ):
-        self.end = end
-        self.header = header
-        self.payload = payload
-
-    def validate(self):
-        if self.header:
-            self.header.validate()
-        if self.payload:
-            self.payload.validate()
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.end is not None:
-            result['end'] = self.end
-        if self.header is not None:
-            result['header'] = self.header.to_map()
-        if self.payload is not None:
-            result['payload'] = self.payload.to_map()
-        return result
-
-    def from_map(self, m: dict = None):
-        m = m or dict()
-        if m.get('end') is not None:
-            self.end = m.get('end')
-        if m.get('header') is not None:
-            temp_model = RunCommentGenerationResponseBodyHeader()
-            self.header = temp_model.from_map(m['header'])
-        if m.get('payload') is not None:
-            temp_model = RunCommentGenerationResponseBodyPayload()
-            self.payload = temp_model.from_map(m['payload'])
-        return self
-
-
-class RunCommentGenerationResponse(TeaModel):
-    def __init__(
-        self,
-        headers: Dict[str, str] = None,
-        status_code: int = None,
-        body: RunCommentGenerationResponseBody = None,
-    ):
-        self.headers = headers
-        self.status_code = status_code
-        self.body = body
-
-    def validate(self):
-        if self.body:
-            self.body.validate()
-
-    def to_map(self):
-        _map = super().to_map()
-        if _map is not None:
-            return _map
-
-        result = dict()
-        if self.headers is not None:
-            result['headers'] = self.headers
-        if self.status_code is not None:
-            result['statusCode'] = self.status_code
-        if self.body is not None:
-            result['body'] = self.body.to_map()
+        if self.content is not None:
+            result['content'] = self.content
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.role is not None:
+            result['role'] = self.role
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('headers') is not None:
-            self.headers = m.get('headers')
-        if m.get('statusCode') is not None:
-            self.status_code = m.get('statusCode')
-        if m.get('body') is not None:
-            temp_model = RunCommentGenerationResponseBody()
-            self.body = temp_model.from_map(m['body'])
+        if m.get('content') is not None:
+            self.content = m.get('content')
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('role') is not None:
+            self.role = m.get('role')
         return self
 
 
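Together, the two hunks above replace the RunCommentGeneration request and response models with a single RunHotTopicChatRequestMessages model that follows the usual TeaModel conventions: snake_case attributes in Python, camelCase keys in to_map()/from_map(). A small round-trip sketch; the createTime value and its format are assumptions:

# Round-trip sketch for the message model added in 2.0.x.
from alibabacloud_quanmiaolightapp20240801 import models as quanmiao_models

msg = quanmiao_models.RunHotTopicChatRequestMessages(
    role='user',
    content='hello',
    create_time='2024-12-05 12:00:00',  # assumed timestamp format
)
as_map = msg.to_map()  # -> {'content': 'hello', 'createTime': '2024-12-05 12:00:00', 'role': 'user'}
restored = quanmiao_models.RunHotTopicChatRequestMessages().from_map(as_map)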
@@ -1120,6 +868,7 @@ class RunHotTopicChatRequest(TeaModel):
         hot_topic_version: str = None,
         hot_topics: List[str] = None,
         image_count: int = None,
+        messages: List[RunHotTopicChatRequestMessages] = None,
         model_custom_prompt_template: str = None,
         model_id: str = None,
         original_session_id: str = None,
@@ -1132,6 +881,7 @@ class RunHotTopicChatRequest(TeaModel):
         self.hot_topic_version = hot_topic_version
         self.hot_topics = hot_topics
         self.image_count = image_count
+        self.messages = messages
         self.model_custom_prompt_template = model_custom_prompt_template
         self.model_id = model_id
         self.original_session_id = original_session_id
@@ -1140,6 +890,10 @@ class RunHotTopicChatRequest(TeaModel):
         self.task_id = task_id
 
     def validate(self):
+        if self.messages:
+            for k in self.messages:
+                if k:
+                    k.validate()
         if self.step_for_broadcast_content_config:
             self.step_for_broadcast_content_config.validate()
 
@@ -1159,6 +913,10 @@ class RunHotTopicChatRequest(TeaModel):
             result['hotTopics'] = self.hot_topics
         if self.image_count is not None:
             result['imageCount'] = self.image_count
+        result['messages'] = []
+        if self.messages is not None:
+            for k in self.messages:
+                result['messages'].append(k.to_map() if k else None)
         if self.model_custom_prompt_template is not None:
             result['modelCustomPromptTemplate'] = self.model_custom_prompt_template
         if self.model_id is not None:
@@ -1185,6 +943,11 @@ class RunHotTopicChatRequest(TeaModel):
             self.hot_topics = m.get('hotTopics')
         if m.get('imageCount') is not None:
             self.image_count = m.get('imageCount')
+        self.messages = []
+        if m.get('messages') is not None:
+            for k in m.get('messages'):
+                temp_model = RunHotTopicChatRequestMessages()
+                self.messages.append(temp_model.from_map(k))
         if m.get('modelCustomPromptTemplate') is not None:
             self.model_custom_prompt_template = m.get('modelCustomPromptTemplate')
         if m.get('modelId') is not None:
@@ -1209,6 +972,7 @@ class RunHotTopicChatShrinkRequest(TeaModel):
         hot_topic_version: str = None,
         hot_topics_shrink: str = None,
         image_count: int = None,
+        messages_shrink: str = None,
         model_custom_prompt_template: str = None,
         model_id: str = None,
         original_session_id: str = None,
@@ -1221,6 +985,7 @@ class RunHotTopicChatShrinkRequest(TeaModel):
         self.hot_topic_version = hot_topic_version
         self.hot_topics_shrink = hot_topics_shrink
         self.image_count = image_count
+        self.messages_shrink = messages_shrink
         self.model_custom_prompt_template = model_custom_prompt_template
         self.model_id = model_id
         self.original_session_id = original_session_id
@@ -1247,6 +1012,8 @@ class RunHotTopicChatShrinkRequest(TeaModel):
             result['hotTopics'] = self.hot_topics_shrink
         if self.image_count is not None:
             result['imageCount'] = self.image_count
+        if self.messages_shrink is not None:
+            result['messages'] = self.messages_shrink
         if self.model_custom_prompt_template is not None:
             result['modelCustomPromptTemplate'] = self.model_custom_prompt_template
         if self.model_id is not None:
@@ -1273,6 +1040,8 @@ class RunHotTopicChatShrinkRequest(TeaModel):
             self.hot_topics_shrink = m.get('hotTopics')
         if m.get('imageCount') is not None:
             self.image_count = m.get('imageCount')
+        if m.get('messages') is not None:
+            self.messages_shrink = m.get('messages')
         if m.get('modelCustomPromptTemplate') is not None:
             self.model_custom_prompt_template = m.get('modelCustomPromptTemplate')
         if m.get('modelId') is not None:
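On the wire, RunHotTopicChatShrinkRequest carries the message list as one JSON string under the messages key; the hunks above only copy that already-serialized value. A rough, hedged equivalent of the shrink step the client performs (the real code goes through OpenApiUtilClient.array_to_string_with_specified_style with the 'json' style):

# Rough equivalent of the generated shrink step for `messages` (assumption:
# the 'json' style serializes the mapped models to a single JSON string).
import json

from alibabacloud_quanmiaolightapp20240801 import models as quanmiao_models

messages = [
    quanmiao_models.RunHotTopicChatRequestMessages(role='user', content='hello'),
]
messages_shrink = json.dumps([m.to_map() for m in messages], ensure_ascii=False)
body = {'messages': messages_shrink}  # what the shrink request puts in the form body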
@@ -1420,21 +1189,96 @@ class RunHotTopicChatResponseBodyPayloadOutputArticles(TeaModel):
         return self
 
 
+class RunHotTopicChatResponseBodyPayloadOutputHotTopicSummariesImages(TeaModel):
+    def __init__(
+        self,
+        url: str = None,
+    ):
+        self.url = url
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.url is not None:
+            result['url'] = self.url
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('url') is not None:
+            self.url = m.get('url')
+        return self
+
+
+class RunHotTopicChatResponseBodyPayloadOutputHotTopicSummariesNews(TeaModel):
+    def __init__(
+        self,
+        title: str = None,
+        url: str = None,
+    ):
+        self.title = title
+        self.url = url
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.title is not None:
+            result['title'] = self.title
+        if self.url is not None:
+            result['url'] = self.url
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('title') is not None:
+            self.title = m.get('title')
+        if m.get('url') is not None:
+            self.url = m.get('url')
+        return self
+
+
 class RunHotTopicChatResponseBodyPayloadOutputHotTopicSummaries(TeaModel):
     def __init__(
         self,
         custom_hot_value: float = None,
+        custom_text_summary: str = None,
         hot_topic: str = None,
         hot_topic_version: str = None,
         hot_value: float = None,
+        images: List[RunHotTopicChatResponseBodyPayloadOutputHotTopicSummariesImages] = None,
+        news: List[RunHotTopicChatResponseBodyPayloadOutputHotTopicSummariesNews] = None,
+        text_summary: str = None,
     ):
         self.custom_hot_value = custom_hot_value
+        self.custom_text_summary = custom_text_summary
         self.hot_topic = hot_topic
         self.hot_topic_version = hot_topic_version
         self.hot_value = hot_value
+        self.images = images
+        self.news = news
+        self.text_summary = text_summary
 
     def validate(self):
-        pass
+        if self.images:
+            for k in self.images:
+                if k:
+                    k.validate()
+        if self.news:
+            for k in self.news:
+                if k:
+                    k.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -1444,24 +1288,50 @@ class RunHotTopicChatResponseBodyPayloadOutputHotTopicSummaries(TeaModel):
         result = dict()
         if self.custom_hot_value is not None:
             result['customHotValue'] = self.custom_hot_value
+        if self.custom_text_summary is not None:
+            result['customTextSummary'] = self.custom_text_summary
         if self.hot_topic is not None:
             result['hotTopic'] = self.hot_topic
         if self.hot_topic_version is not None:
             result['hotTopicVersion'] = self.hot_topic_version
         if self.hot_value is not None:
             result['hotValue'] = self.hot_value
+        result['images'] = []
+        if self.images is not None:
+            for k in self.images:
+                result['images'].append(k.to_map() if k else None)
+        result['news'] = []
+        if self.news is not None:
+            for k in self.news:
+                result['news'].append(k.to_map() if k else None)
+        if self.text_summary is not None:
+            result['textSummary'] = self.text_summary
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
         if m.get('customHotValue') is not None:
             self.custom_hot_value = m.get('customHotValue')
+        if m.get('customTextSummary') is not None:
+            self.custom_text_summary = m.get('customTextSummary')
         if m.get('hotTopic') is not None:
             self.hot_topic = m.get('hotTopic')
         if m.get('hotTopicVersion') is not None:
             self.hot_topic_version = m.get('hotTopicVersion')
         if m.get('hotValue') is not None:
             self.hot_value = m.get('hotValue')
+        self.images = []
+        if m.get('images') is not None:
+            for k in m.get('images'):
+                temp_model = RunHotTopicChatResponseBodyPayloadOutputHotTopicSummariesImages()
+                self.images.append(temp_model.from_map(k))
+        self.news = []
+        if m.get('news') is not None:
+            for k in m.get('news'):
+                temp_model = RunHotTopicChatResponseBodyPayloadOutputHotTopicSummariesNews()
+                self.news.append(temp_model.from_map(k))
+        if m.get('textSummary') is not None:
+            self.text_summary = m.get('textSummary')
         return self
 
 
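With the Images and News models added above, every hot-topic summary in the RunHotTopicChat response can now carry related images, news links, and summary text (textSummary and customTextSummary). A hedged sketch of reading these fields, continuing the earlier call sketch; the response.body.payload.output.hot_topic_summaries attribute path is inferred from the generated class names, not stated by this diff:

# Hedged sketch: reading the response fields added in 2.0.x.
for summary in (response.body.payload.output.hot_topic_summaries or []):
    print(summary.hot_topic, summary.hot_value, summary.text_summary)
    for image in (summary.images or []):
        print('image:', image.url)
    for item in (summary.news or []):
        print('news:', item.title, item.url)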
{alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud-quanmiaolightapp20240801
-Version: 1.4.1
+Version: 2.0.1
 Summary: Alibaba Cloud QuanMiaoLightApp (20240801) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
alibabacloud_quanmiaolightapp20240801-1.4.1/alibabacloud_quanmiaolightapp20240801/__init__.py
DELETED
@@ -1 +0,0 @@
-__version__ = '1.4.1'
Renamed without content changes:
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/LICENSE
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/MANIFEST.in
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/README-CN.md
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/README.md
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/SOURCES.txt
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/dependency_links.txt
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/requires.txt
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/alibabacloud_quanmiaolightapp20240801.egg-info/top_level.txt
- {alibabacloud_quanmiaolightapp20240801-1.4.1 → alibabacloud_quanmiaolightapp20240801-2.0.1}/setup.cfg