alibabacloud-quanmiaolightapp20240801 2.13.2__py3-none-any.whl → 2.13.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_quanmiaolightapp20240801/__init__.py +1 -1
- alibabacloud_quanmiaolightapp20240801/client.py +4448 -3769
- alibabacloud_quanmiaolightapp20240801/models/__init__.py +691 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_response_body.py +74 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_request.py +52 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_shrink_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_response_body.py +303 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_request.py +112 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_shrink_request.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_response_body.py +374 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_response_body.py +174 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_response_body.py +347 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_config_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_response_body.py +1620 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_config_response_body.py +106 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_response_body.py +494 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_response_body.py +180 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response_body.py +196 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_request.py +65 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_response_body.py +367 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_request.py +203 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_response_body.py +331 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_shrink_request.py +109 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_request.py +81 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_response_body.py +241 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_request.py +264 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_response_body.py +636 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_shrink_request.py +121 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_request.py +100 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_response_body.py +241 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_shrink_request.py +52 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_request.py +59 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_response_body.py +232 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_shrink_request.py +57 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_request.py +89 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_request.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_shrink_request.py +90 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_request.py +49 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_request.py +42 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_request.py +82 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_response_body.py +290 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_request.py +75 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_shrink_request.py +73 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_request.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_shrink_request.py +90 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_request.py +600 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_response_body.py +1668 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_shrink_request.py +209 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_request.py +142 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_response_body.py +363 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_shrink_request.py +140 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_request.py +247 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_shrink_request.py +113 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_request.py +167 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_response_body.py +103 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_shrink_request.py +81 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_request.py +143 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_shrink_request.py +97 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_request.py +593 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_response_body.py +103 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_shrink_request.py +202 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_request.py +148 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_shrink_request.py +146 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_response_body.py +119 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_request.py +45 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_response_body.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_shrink_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_response_body.py +120 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/METADATA +7 -7
- alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info/RECORD +146 -0
- alibabacloud_quanmiaolightapp20240801/models.py +0 -16578
- alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info/RECORD +0 -8
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/LICENSE +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/WHEEL +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from darabonba.model import DaraModel
|
|
6
|
+
|
|
7
|
+
class RunHotTopicChatShrinkRequest(DaraModel):
    """Shrink variant of the RunHotTopicChat request.

    Complex request members are flattened ("shrunk") into JSON strings by
    the client before transmission; the ``*_shrink`` attributes hold those
    serialized forms. ``to_map``/``from_map`` translate between snake_case
    attributes and the camelCase wire keys.
    """

    def __init__(
        self,
        category: str = None,
        generate_options_shrink: str = None,
        hot_topic_version: str = None,
        hot_topics_shrink: str = None,
        image_count: int = None,
        messages_shrink: str = None,
        model_custom_prompt_template: str = None,
        model_id: str = None,
        original_session_id: str = None,
        prompt: str = None,
        step_for_broadcast_content_config_shrink: str = None,
        task_id: str = None,
    ):
        self.category = category
        self.generate_options_shrink = generate_options_shrink
        self.hot_topic_version = hot_topic_version
        self.hot_topics_shrink = hot_topics_shrink
        self.image_count = image_count
        self.messages_shrink = messages_shrink
        self.model_custom_prompt_template = model_custom_prompt_template
        self.model_id = model_id
        self.original_session_id = original_session_id
        self.prompt = prompt
        self.step_for_broadcast_content_config_shrink = step_for_broadcast_content_config_shrink
        self.task_id = task_id

    def _pairs(self):
        # (wire key, attribute name) in the order the generator emits them.
        return (
            ('category', 'category'),
            ('generateOptions', 'generate_options_shrink'),
            ('hotTopicVersion', 'hot_topic_version'),
            ('hotTopics', 'hot_topics_shrink'),
            ('imageCount', 'image_count'),
            ('messages', 'messages_shrink'),
            ('modelCustomPromptTemplate', 'model_custom_prompt_template'),
            ('modelId', 'model_id'),
            ('originalSessionId', 'original_session_id'),
            ('prompt', 'prompt'),
            ('stepForBroadcastContentConfig', 'step_for_broadcast_content_config_shrink'),
            ('taskId', 'task_id'),
        )

    def validate(self):
        # No nested models; nothing to validate.
        pass

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import List
|
|
6
|
+
|
|
7
|
+
from alibabacloud_quanmiaolightapp20240801 import models as main_models
|
|
8
|
+
from darabonba.model import DaraModel
|
|
9
|
+
|
|
10
|
+
class RunHotTopicSummaryRequest(DaraModel):
    """Request model for the RunHotTopicSummary API.

    All three members are marked required by the API definition; the
    summary-style config is a nested model serialized recursively.
    """

    def __init__(
        self,
        hot_topic_version: str = None,
        step_for_custom_summary_style_config: main_models.RunHotTopicSummaryRequestStepForCustomSummaryStyleConfig = None,
        topic_ids: List[str] = None,
    ):
        # This parameter is required.
        self.hot_topic_version = hot_topic_version
        # This parameter is required.
        self.step_for_custom_summary_style_config = step_for_custom_summary_style_config
        # This parameter is required.
        self.topic_ids = topic_ids

    def validate(self):
        # Recurse into the nested config model when present.
        if self.step_for_custom_summary_style_config:
            self.step_for_custom_summary_style_config.validate()

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = dict() if parent is None else parent
        if self.hot_topic_version is not None:
            result['hotTopicVersion'] = self.hot_topic_version
        if self.step_for_custom_summary_style_config is not None:
            result['stepForCustomSummaryStyleConfig'] = self.step_for_custom_summary_style_config.to_map()
        if self.topic_ids is not None:
            result['topicIds'] = self.topic_ids
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        version = m.get('hotTopicVersion')
        if version is not None:
            self.hot_topic_version = version
        config = m.get('stepForCustomSummaryStyleConfig')
        if config is not None:
            self.step_for_custom_summary_style_config = (
                main_models.RunHotTopicSummaryRequestStepForCustomSummaryStyleConfig().from_map(config)
            )
        ids = m.get('topicIds')
        if ids is not None:
            self.topic_ids = ids
        return self
|
58
|
+
class RunHotTopicSummaryRequestStepForCustomSummaryStyleConfig(DaraModel):
    """Custom summary-style configuration nested inside RunHotTopicSummaryRequest."""

    def __init__(
        self,
        summary_image_count: int = None,
        summary_model: str = None,
        summary_prompt: str = None,
    ):
        self.summary_image_count = summary_image_count
        self.summary_model = summary_model
        self.summary_prompt = summary_prompt

    def validate(self):
        # Flat model; nothing to validate.
        pass

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = dict() if parent is None else parent
        if self.summary_image_count is not None:
            result['summaryImageCount'] = self.summary_image_count
        if self.summary_model is not None:
            result['summaryModel'] = self.summary_model
        if self.summary_prompt is not None:
            result['summaryPrompt'] = self.summary_prompt
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        count = m.get('summaryImageCount')
        if count is not None:
            self.summary_image_count = count
        model = m.get('summaryModel')
        if model is not None:
            self.summary_model = model
        prompt = m.get('summaryPrompt')
        if prompt is not None:
            self.summary_prompt = prompt
        return self
|
100
|
+
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Dict
|
|
6
|
+
|
|
7
|
+
from alibabacloud_quanmiaolightapp20240801 import models as main_models
|
|
8
|
+
from darabonba.model import DaraModel
|
|
9
|
+
|
|
10
|
+
class RunHotTopicSummaryResponse(DaraModel):
    """HTTP-level wrapper for the RunHotTopicSummary response.

    Carries the transport pieces (headers, status code) together with the
    deserialized response body model.
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: main_models.RunHotTopicSummaryResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Only the body is a nested model that needs validating.
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = dict() if parent is None else parent
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        body = m.get('body')
        if body is not None:
            self.body = main_models.RunHotTopicSummaryResponseBody().from_map(body)
        return self
|
54
|
+
|
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from alibabacloud_quanmiaolightapp20240801 import models as main_models
|
|
6
|
+
from darabonba.model import DaraModel
|
|
7
|
+
|
|
8
|
+
class RunHotTopicSummaryResponseBody(DaraModel):
    """Body of the RunHotTopicSummary response.

    ``header`` carries event/error metadata, ``payload`` the model output
    and token usage; both are nested models serialized recursively.
    """

    def __init__(
        self,
        header: main_models.RunHotTopicSummaryResponseBodyHeader = None,
        payload: main_models.RunHotTopicSummaryResponseBodyPayload = None,
        request_id: str = None,
    ):
        self.header = header
        self.payload = payload
        # Id of the request
        self.request_id = request_id

    def validate(self):
        # Recurse into whichever nested models are present.
        if self.header:
            self.header.validate()
        if self.payload:
            self.payload.validate()

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = dict() if parent is None else parent
        if self.header is not None:
            result['header'] = self.header.to_map()
        if self.payload is not None:
            result['payload'] = self.payload.to_map()
        if self.request_id is not None:
            result['requestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        header = m.get('header')
        if header is not None:
            self.header = main_models.RunHotTopicSummaryResponseBodyHeader().from_map(header)
        payload = m.get('payload')
        if payload is not None:
            self.payload = main_models.RunHotTopicSummaryResponseBodyPayload().from_map(payload)
        request_id = m.get('requestId')
        if request_id is not None:
            self.request_id = request_id
        return self
|
56
|
+
|
|
57
|
+
class RunHotTopicSummaryResponseBodyPayload(DaraModel):
    """Payload of the RunHotTopicSummary response body: output plus usage."""

    def __init__(
        self,
        output: main_models.RunHotTopicSummaryResponseBodyPayloadOutput = None,
        usage: main_models.RunHotTopicSummaryResponseBodyPayloadUsage = None,
    ):
        self.output = output
        self.usage = usage

    def validate(self):
        # Recurse into whichever nested models are present.
        if self.output:
            self.output.validate()
        if self.usage:
            self.usage.validate()

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = dict() if parent is None else parent
        if self.output is not None:
            result['output'] = self.output.to_map()
        if self.usage is not None:
            result['usage'] = self.usage.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        output = m.get('output')
        if output is not None:
            self.output = main_models.RunHotTopicSummaryResponseBodyPayloadOutput().from_map(output)
        usage = m.get('usage')
        if usage is not None:
            self.usage = main_models.RunHotTopicSummaryResponseBodyPayloadUsage().from_map(usage)
        return self
|
96
|
+
|
|
97
|
+
class RunHotTopicSummaryResponseBodyPayloadUsage(DaraModel):
    """Token-usage counters reported with a RunHotTopicSummary result."""

    def __init__(
        self,
        input_tokens: int = None,
        output_tokens: int = None,
        total_tokens: int = None,
    ):
        self.input_tokens = input_tokens
        self.output_tokens = output_tokens
        self.total_tokens = total_tokens

    def _pairs(self):
        # (wire key, attribute name) in the order the generator emits them.
        return (
            ('inputTokens', 'input_tokens'),
            ('outputTokens', 'output_tokens'),
            ('totalTokens', 'total_tokens'),
        )

    def validate(self):
        # Flat model; nothing to validate.
        pass

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
139
|
+
|
|
140
|
+
class RunHotTopicSummaryResponseBodyPayloadOutput(DaraModel):
    """Generated summary output: the text and the topic it belongs to."""

    def __init__(
        self,
        text: str = None,
        topic_id: str = None,
    ):
        self.text = text
        self.topic_id = topic_id

    def validate(self):
        # Flat model; nothing to validate.
        pass

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, attr in (('text', 'text'), ('topicId', 'topic_id')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        for key, attr in (('text', 'text'), ('topicId', 'topic_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
174
|
+
|
|
175
|
+
class RunHotTopicSummaryResponseBodyHeader(DaraModel):
    """Event/error metadata header of the RunHotTopicSummary response body."""

    def __init__(
        self,
        error_code: str = None,
        error_message: str = None,
        event: str = None,
        session_id: str = None,
        task_id: str = None,
        trace_id: str = None,
    ):
        self.error_code = error_code
        self.error_message = error_message
        self.event = event
        self.session_id = session_id
        self.task_id = task_id
        self.trace_id = trace_id

    def _pairs(self):
        # (wire key, attribute name) in the order the generator emits them.
        return (
            ('errorCode', 'error_code'),
            ('errorMessage', 'error_message'),
            ('event', 'event'),
            ('sessionId', 'session_id'),
            ('taskId', 'task_id'),
            ('traceId', 'trace_id'),
        )

    def validate(self):
        # Flat model; nothing to validate.
        pass

    def to_map(self):
        """Serialize to a plain dict, omitting attributes that are None."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, attr in self._pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance from a wire-format dict; returns self."""
        m = m or dict()
        for key, attr in self._pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
241
|
+
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from darabonba.model import DaraModel
|
|
6
|
+
|
|
7
|
+
class RunHotTopicSummaryShrinkRequest(DaraModel):
    """Shrink variant of the RunHotTopicSummary request.

    The ``*_shrink`` members hold string payloads (the complex members of
    the plain request pre-serialized by the client) — TODO confirm against
    the client's shrink step.
    """

    def __init__(
        self,
        hot_topic_version: str = None,
        step_for_custom_summary_style_config_shrink: str = None,
        topic_ids_shrink: str = None,
    ):
        # All three of these parameters are required by the API.
        self.hot_topic_version = hot_topic_version
        self.step_for_custom_summary_style_config_shrink = step_for_custom_summary_style_config_shrink
        self.topic_ids_shrink = topic_ids_shrink

    def validate(self):
        # No constraints to enforce on this model.
        pass

    def to_map(self):
        """Serialize to a camelCase dict, skipping None-valued fields."""
        mapped = super().to_map()
        if mapped is None:
            mapped = dict()
        candidates = {
            'hotTopicVersion': self.hot_topic_version,
            'stepForCustomSummaryStyleConfig': self.step_for_custom_summary_style_config_shrink,
            'topicIds': self.topic_ids_shrink,
        }
        for key, value in candidates.items():
            if value is not None:
                mapped[key] = value
        return mapped

    def from_map(self, m: dict = None):
        """Populate fields from a camelCase dict and return self."""
        data = m or dict()
        for key, attr in (
            ('hotTopicVersion', 'hot_topic_version'),
            ('stepForCustomSummaryStyleConfig', 'step_for_custom_summary_style_config_shrink'),
            ('topicIds', 'topic_ids_shrink'),
        ):
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
|
|
52
|
+
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import List
|
|
6
|
+
|
|
7
|
+
from darabonba.model import DaraModel
|
|
8
|
+
|
|
9
|
+
class RunMarketingInformationExtractRequest(DaraModel):
    """Request model for RunMarketingInformationExtract.

    Fields: an optional custom prompt, the extraction type, the model id,
    and the list of source-material strings to extract from.
    """

    def __init__(
        self,
        custom_prompt: str = None,
        extract_type: str = None,
        model_id: str = None,
        source_materials: List[str] = None,
    ):
        self.custom_prompt = custom_prompt
        self.extract_type = extract_type
        self.model_id = model_id
        self.source_materials = source_materials

    def validate(self):
        # No constraints to enforce on this model.
        pass

    def _field_pairs(self):
        # (python attribute, wire key) pairs, in the wire order used by
        # the generated original.
        return (
            ('custom_prompt', 'customPrompt'),
            ('extract_type', 'extractType'),
            ('model_id', 'modelId'),
            ('source_materials', 'sourceMaterials'),
        )

    def to_map(self):
        """Serialize to a camelCase dict, omitting None-valued fields."""
        base = super().to_map()
        result = base if base is not None else dict()
        for attr, key in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a camelCase dict and return self."""
        data = m or dict()
        for attr, key in self._field_pairs():
            value = data.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
|
|
59
|
+
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Dict
|
|
6
|
+
|
|
7
|
+
from alibabacloud_quanmiaolightapp20240801 import models as main_models
|
|
8
|
+
from darabonba.model import DaraModel
|
|
9
|
+
|
|
10
|
+
class RunMarketingInformationExtractResponse(DaraModel):
    """Full HTTP-level response wrapper for RunMarketingInformationExtract.

    Bundles the response headers, the HTTP status code, and the typed
    response body model.
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: main_models.RunMarketingInformationExtractResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Delegate validation to the nested body model when one is set.
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict; the body is recursively mapped via its to_map."""
        base = super().to_map()
        result = base if base is not None else dict()
        # Scalar members first, then the nested body, matching the
        # original key order (headers, statusCode, body).
        simple = (('headers', self.headers), ('statusCode', self.status_code))
        for key, value in simple:
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a dict; 'body' is rebuilt as a typed model."""
        source = m or dict()
        headers = source.get('headers')
        if headers is not None:
            self.headers = headers
        status = source.get('statusCode')
        if status is not None:
            self.status_code = status
        body_map = source.get('body')
        if body_map is not None:
            self.body = main_models.RunMarketingInformationExtractResponseBody().from_map(body_map)
        return self
|
|
54
|
+
|