alibabacloud-quanmiaolightapp20240801 2.13.2__py3-none-any.whl → 2.13.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_quanmiaolightapp20240801/__init__.py +1 -1
- alibabacloud_quanmiaolightapp20240801/client.py +4448 -3769
- alibabacloud_quanmiaolightapp20240801/models/__init__.py +691 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_response_body.py +74 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_request.py +52 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_shrink_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_response_body.py +303 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_request.py +112 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_shrink_request.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_response_body.py +374 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_response_body.py +174 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_response_body.py +347 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_config_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_response_body.py +1620 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_config_response_body.py +106 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_response_body.py +494 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_response_body.py +180 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response_body.py +196 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_request.py +65 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_response_body.py +367 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_request.py +203 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_response_body.py +331 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_shrink_request.py +109 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_request.py +81 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_response_body.py +241 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_request.py +264 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_response_body.py +636 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_shrink_request.py +121 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_request.py +100 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_response_body.py +241 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_shrink_request.py +52 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_request.py +59 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_response_body.py +232 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_shrink_request.py +57 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_request.py +89 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_request.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_shrink_request.py +90 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_request.py +49 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_request.py +42 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_request.py +82 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_response_body.py +290 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_request.py +75 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_shrink_request.py +73 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_request.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_shrink_request.py +90 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_request.py +600 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_response_body.py +1668 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_shrink_request.py +209 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_request.py +142 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_response_body.py +363 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_shrink_request.py +140 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_request.py +247 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_shrink_request.py +113 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_request.py +167 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_response_body.py +103 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_shrink_request.py +81 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_request.py +143 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_shrink_request.py +97 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_request.py +593 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_response_body.py +103 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_shrink_request.py +202 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_request.py +148 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_shrink_request.py +146 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_response_body.py +119 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_request.py +45 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_response_body.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_shrink_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_response_body.py +120 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/METADATA +7 -7
- alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info/RECORD +146 -0
- alibabacloud_quanmiaolightapp20240801/models.py +0 -16578
- alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info/RECORD +0 -8
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/LICENSE +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/WHEEL +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from alibabacloud_quanmiaolightapp20240801 import models as main_models
|
|
6
|
+
from darabonba.model import DaraModel
|
|
7
|
+
|
|
8
|
+
class RunEssayCorrectionResponseBody(DaraModel):
    """Top-level response body of the RunEssayCorrection operation.

    Carries the event ``header`` metadata, the correction ``payload`` and the
    service-assigned ``requestId``.
    """

    def __init__(
        self,
        header: main_models.RunEssayCorrectionResponseBodyHeader = None,
        payload: main_models.RunEssayCorrectionResponseBodyPayload = None,
        request_id: str = None,
    ):
        # Nested event/error metadata model.
        self.header = header
        # Nested model holding output and usage.
        self.payload = payload
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested sub-models that are set."""
        for nested in (self.header, self.payload):
            if nested:
                nested.validate()

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        if self.header is not None:
            result['header'] = self.header.to_map()
        if self.payload is not None:
            result['payload'] = self.payload.to_map()
        if self.request_id is not None:
            result['requestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        header_map = m.get('header')
        if header_map is not None:
            self.header = main_models.RunEssayCorrectionResponseBodyHeader().from_map(header_map)
        payload_map = m.get('payload')
        if payload_map is not None:
            self.payload = main_models.RunEssayCorrectionResponseBodyPayload().from_map(payload_map)
        if m.get('requestId') is not None:
            self.request_id = m.get('requestId')
        return self
|
|
56
|
+
|
|
57
|
+
class RunEssayCorrectionResponseBodyPayload(DaraModel):
    """Payload section of the RunEssayCorrection response body.

    Pairs the model ``output`` with its token ``usage`` statistics.
    """

    def __init__(
        self,
        output: main_models.RunEssayCorrectionResponseBodyPayloadOutput = None,
        usage: main_models.RunEssayCorrectionResponseBodyPayloadUsage = None,
    ):
        # Nested model with the correction result.
        self.output = output
        # Nested model with token counts.
        self.usage = usage

    def validate(self):
        """Recursively validate the nested sub-models that are set."""
        for nested in (self.output, self.usage):
            if nested:
                nested.validate()

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        if self.output is not None:
            result['output'] = self.output.to_map()
        if self.usage is not None:
            result['usage'] = self.usage.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        output_map = m.get('output')
        if output_map is not None:
            self.output = main_models.RunEssayCorrectionResponseBodyPayloadOutput().from_map(output_map)
        usage_map = m.get('usage')
        if usage_map is not None:
            self.usage = main_models.RunEssayCorrectionResponseBodyPayloadUsage().from_map(usage_map)
        return self
|
|
96
|
+
|
|
97
|
+
class RunEssayCorrectionResponseBodyPayloadUsage(DaraModel):
    """Token-usage counters reported with a RunEssayCorrection response."""

    # Maps wire-format keys to attribute names for (de)serialization.
    _FIELD_MAP = (
        ('inputTokens', 'input_tokens'),
        ('outputTokens', 'output_tokens'),
        ('totalTokens', 'total_tokens'),
    )

    def __init__(
        self,
        input_tokens: int = None,
        output_tokens: int = None,
        total_tokens: int = None,
    ):
        # Serialized as 'inputTokens'.
        self.input_tokens = input_tokens
        # Serialized as 'outputTokens'.
        self.output_tokens = output_tokens
        # Serialized as 'totalTokens'.
        self.total_tokens = total_tokens

    def validate(self):
        """No nested models, so nothing to validate."""
        pass

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
|
|
139
|
+
|
|
140
|
+
class RunEssayCorrectionResponseBodyPayloadOutput(DaraModel):
    """Correction result of RunEssayCorrection: a ``score`` and a ``text``."""

    def __init__(
        self,
        score: int = None,
        text: str = None,
    ):
        # Serialized as 'score'.
        self.score = score
        # Serialized as 'text'.
        self.text = text

    def validate(self):
        """No nested models, so nothing to validate."""
        pass

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, value in (('score', self.score), ('text', self.text)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        if m.get('score') is not None:
            self.score = m.get('score')
        if m.get('text') is not None:
            self.text = m.get('text')
        return self
|
|
174
|
+
|
|
175
|
+
class RunEssayCorrectionResponseBodyHeader(DaraModel):
    """Event/error metadata of a RunEssayCorrection response."""

    # Maps wire-format keys to attribute names for (de)serialization.
    _FIELD_MAP = (
        ('errorCode', 'error_code'),
        ('errorMessage', 'error_message'),
        ('event', 'event'),
        ('sessionId', 'session_id'),
        ('taskId', 'task_id'),
        ('traceId', 'trace_id'),
    )

    def __init__(
        self,
        error_code: str = None,
        error_message: str = None,
        event: str = None,
        session_id: str = None,
        task_id: str = None,
        trace_id: str = None,
    ):
        # Serialized as 'errorCode'.
        self.error_code = error_code
        # Serialized as 'errorMessage'.
        self.error_message = error_message
        # Serialized as 'event'.
        self.event = event
        # Serialized as 'sessionId'.
        self.session_id = session_id
        # Serialized as 'taskId'.
        self.task_id = task_id
        # Serialized as 'traceId'.
        self.trace_id = trace_id

    def validate(self):
        """No nested models, so nothing to validate."""
        pass

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
|
|
241
|
+
|
|
@@ -0,0 +1,264 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import List
|
|
6
|
+
|
|
7
|
+
from alibabacloud_quanmiaolightapp20240801 import models as main_models
|
|
8
|
+
from darabonba.model import DaraModel
|
|
9
|
+
|
|
10
|
+
class RunHotTopicChatRequest(DaraModel):
    """Request model for the RunHotTopicChat operation.

    Bundles chat history (``messages``), topic-selection inputs and generation
    options for the hot-topic chat service.
    """

    def __init__(
        self,
        category: str = None,
        generate_options: List[str] = None,
        hot_topic_version: str = None,
        hot_topics: List[str] = None,
        image_count: int = None,
        messages: List[main_models.RunHotTopicChatRequestMessages] = None,
        model_custom_prompt_template: str = None,
        model_id: str = None,
        original_session_id: str = None,
        prompt: str = None,
        step_for_broadcast_content_config: main_models.RunHotTopicChatRequestStepForBroadcastContentConfig = None,
        task_id: str = None,
    ):
        # Serialized as 'category'.
        self.category = category
        # Serialized as 'generateOptions'.
        self.generate_options = generate_options
        # Serialized as 'hotTopicVersion'.
        self.hot_topic_version = hot_topic_version
        # Serialized as 'hotTopics'.
        self.hot_topics = hot_topics
        # Serialized as 'imageCount'.
        self.image_count = image_count
        # List of nested chat-message models; serialized as 'messages'.
        self.messages = messages
        # Serialized as 'modelCustomPromptTemplate'.
        self.model_custom_prompt_template = model_custom_prompt_template
        # Serialized as 'modelId'.
        self.model_id = model_id
        # Serialized as 'originalSessionId'.
        self.original_session_id = original_session_id
        # Serialized as 'prompt'.
        self.prompt = prompt
        # Nested config model; serialized as 'stepForBroadcastContentConfig'.
        self.step_for_broadcast_content_config = step_for_broadcast_content_config
        # Serialized as 'taskId'.
        self.task_id = task_id

    def validate(self):
        """Recursively validate nested messages and the broadcast config."""
        for message in self.messages or []:
            if message:
                message.validate()
        if self.step_for_broadcast_content_config:
            self.step_for_broadcast_content_config.validate()

    def to_map(self):
        """Serialize to a plain dict.

        Scalar fields are omitted when unset; 'messages' is always present
        (possibly as an empty list), matching the generated wire format.
        """
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, value in (
            ('category', self.category),
            ('generateOptions', self.generate_options),
            ('hotTopicVersion', self.hot_topic_version),
            ('hotTopics', self.hot_topics),
            ('imageCount', self.image_count),
        ):
            if value is not None:
                result[key] = value
        # 'messages' is emitted unconditionally, even when unset.
        result['messages'] = []
        if self.messages is not None:
            for message in self.messages:
                result['messages'].append(message.to_map() if message else None)
        for key, value in (
            ('modelCustomPromptTemplate', self.model_custom_prompt_template),
            ('modelId', self.model_id),
            ('originalSessionId', self.original_session_id),
            ('prompt', self.prompt),
        ):
            if value is not None:
                result[key] = value
        if self.step_for_broadcast_content_config is not None:
            result['stepForBroadcastContentConfig'] = self.step_for_broadcast_content_config.to_map()
        if self.task_id is not None:
            result['taskId'] = self.task_id
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``.

        ``self.messages`` is always reset to a fresh list, then rebuilt from
        the incoming 'messages' entries when present.
        """
        m = m or dict()
        for key, attr in (
            ('category', 'category'),
            ('generateOptions', 'generate_options'),
            ('hotTopicVersion', 'hot_topic_version'),
            ('hotTopics', 'hot_topics'),
            ('imageCount', 'image_count'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        self.messages = []
        if m.get('messages') is not None:
            for item in m.get('messages'):
                self.messages.append(
                    main_models.RunHotTopicChatRequestMessages().from_map(item)
                )
        for key, attr in (
            ('modelCustomPromptTemplate', 'model_custom_prompt_template'),
            ('modelId', 'model_id'),
            ('originalSessionId', 'original_session_id'),
            ('prompt', 'prompt'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        config_map = m.get('stepForBroadcastContentConfig')
        if config_map is not None:
            self.step_for_broadcast_content_config = (
                main_models.RunHotTopicChatRequestStepForBroadcastContentConfig().from_map(config_map)
            )
        if m.get('taskId') is not None:
            self.task_id = m.get('taskId')
        return self
|
|
135
|
+
|
|
136
|
+
class RunHotTopicChatRequestStepForBroadcastContentConfig(DaraModel):
    """Broadcast-content step configuration for RunHotTopicChat."""

    def __init__(
        self,
        categories: List[str] = None,
        custom_hot_value_weights: List[main_models.RunHotTopicChatRequestStepForBroadcastContentConfigCustomHotValueWeights] = None,
        topic_count: int = None,
    ):
        # Serialized as 'categories'.
        self.categories = categories
        # List of nested weight models; serialized as 'customHotValueWeights'.
        self.custom_hot_value_weights = custom_hot_value_weights
        # Serialized as 'topicCount'.
        self.topic_count = topic_count

    def validate(self):
        """Recursively validate the nested weight models."""
        for weight in self.custom_hot_value_weights or []:
            if weight:
                weight.validate()

    def to_map(self):
        """Serialize to a plain dict.

        'customHotValueWeights' is always present (possibly empty), matching
        the generated wire format; other fields are omitted when unset.
        """
        parent = super().to_map()
        result = parent if parent is not None else dict()
        if self.categories is not None:
            result['categories'] = self.categories
        # Emitted unconditionally, even when unset.
        result['customHotValueWeights'] = []
        if self.custom_hot_value_weights is not None:
            for weight in self.custom_hot_value_weights:
                result['customHotValueWeights'].append(weight.to_map() if weight else None)
        if self.topic_count is not None:
            result['topicCount'] = self.topic_count
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``.

        ``self.custom_hot_value_weights`` is always reset to a fresh list,
        then rebuilt from the incoming entries when present.
        """
        m = m or dict()
        if m.get('categories') is not None:
            self.categories = m.get('categories')
        self.custom_hot_value_weights = []
        if m.get('customHotValueWeights') is not None:
            for item in m.get('customHotValueWeights'):
                self.custom_hot_value_weights.append(
                    main_models.RunHotTopicChatRequestStepForBroadcastContentConfigCustomHotValueWeights().from_map(item)
                )
        if m.get('topicCount') is not None:
            self.topic_count = m.get('topicCount')
        return self
|
|
186
|
+
|
|
187
|
+
class RunHotTopicChatRequestStepForBroadcastContentConfigCustomHotValueWeights(DaraModel):
    """A single (dimension, weight) pair for custom hot-value weighting."""

    def __init__(
        self,
        dimension: str = None,
        weight: int = None,
    ):
        # Serialized as 'dimension'.
        self.dimension = dimension
        # Serialized as 'weight'.
        self.weight = weight

    def validate(self):
        """No nested models, so nothing to validate."""
        pass

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, value in (('dimension', self.dimension), ('weight', self.weight)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        if m.get('dimension') is not None:
            self.dimension = m.get('dimension')
        if m.get('weight') is not None:
            self.weight = m.get('weight')
        return self
|
|
221
|
+
|
|
222
|
+
class RunHotTopicChatRequestMessages(DaraModel):
    """One chat message in the RunHotTopicChat request history."""

    # Maps wire-format keys to attribute names for (de)serialization.
    _FIELD_MAP = (
        ('content', 'content'),
        ('createTime', 'create_time'),
        ('role', 'role'),
    )

    def __init__(
        self,
        content: str = None,
        create_time: str = None,
        role: str = None,
    ):
        # Serialized as 'content'.
        self.content = content
        # Serialized as 'createTime'.
        self.create_time = create_time
        # Serialized as 'role'.
        self.role = role

    def validate(self):
        """No nested models, so nothing to validate."""
        pass

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
|
|
264
|
+
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# This file is auto-generated, don't edit it. Thanks.
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Dict
|
|
6
|
+
|
|
7
|
+
from alibabacloud_quanmiaolightapp20240801 import models as main_models
|
|
8
|
+
from darabonba.model import DaraModel
|
|
9
|
+
|
|
10
|
+
class RunHotTopicChatResponse(DaraModel):
    """Transport-level response wrapper for RunHotTopicChat.

    Holds the HTTP ``headers``, ``statusCode`` and the parsed ``body`` model.
    """

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: main_models.RunHotTopicChatResponseBody = None,
    ):
        # HTTP response headers; serialized as 'headers'.
        self.headers = headers
        # HTTP status code; serialized as 'statusCode'.
        self.status_code = status_code
        # Parsed response body model; serialized as 'body'.
        self.body = body

    def validate(self):
        """Recursively validate the nested body model when set."""
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; unset fields are omitted."""
        parent = super().to_map()
        result = parent if parent is not None else dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this instance in place from a dict and return ``self``."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        body_map = m.get('body')
        if body_map is not None:
            self.body = main_models.RunHotTopicChatResponseBody().from_map(body_map)
        return self
|
|
54
|
+
|