alibabacloud-quanmiaolightapp20240801 2.13.2__py3-none-any.whl → 2.13.3__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- alibabacloud_quanmiaolightapp20240801/__init__.py +1 -1
- alibabacloud_quanmiaolightapp20240801/client.py +4448 -3769
- alibabacloud_quanmiaolightapp20240801/models/__init__.py +691 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_cancel_async_task_response_body.py +74 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_request.py +52 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_export_analysis_tag_detail_by_task_id_shrink_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_broadcast_news_response_body.py +303 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_request.py +112 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_generate_output_format_shrink_request.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_enterprise_voc_analysis_task_response_body.py +374 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_essay_correction_task_response_body.py +174 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_file_content_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_tag_mining_analysis_task_response_body.py +347 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_config_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_analysis_task_response_body.py +1620 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_config_response_body.py +106 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_get_video_detect_shot_task_response_body.py +494 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_response_body.py +180 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response_body.py +196 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_request.py +65 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_response_body.py +367 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_request.py +203 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_response_body.py +331 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_enterprise_voc_analysis_shrink_request.py +109 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_request.py +81 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_essay_correction_response_body.py +241 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_request.py +264 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_response_body.py +636 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_chat_shrink_request.py +121 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_request.py +100 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_response_body.py +241 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_hot_topic_summary_shrink_request.py +52 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_request.py +59 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_response_body.py +232 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_extract_shrink_request.py +57 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_request.py +89 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_marketing_information_writing_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_request.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_network_content_audit_shrink_request.py +90 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_request.py +49 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_ocr_parse_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_request.py +42 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_chat_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_request.py +50 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_continue_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_request.py +82 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_planning_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_request.py +33 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_script_refine_response_body.py +290 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_request.py +75 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_response_body.py +248 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_style_writing_shrink_request.py +73 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_request.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_response_body.py +233 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_tag_mining_analysis_shrink_request.py +90 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_request.py +600 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_response_body.py +1668 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_analysis_shrink_request.py +209 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_request.py +142 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_response_body.py +363 -0
- alibabacloud_quanmiaolightapp20240801/models/_run_video_detect_shot_shrink_request.py +140 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_request.py +247 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_enterprise_voc_analysis_task_shrink_request.py +113 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_request.py +167 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_response_body.py +103 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_essay_correction_task_shrink_request.py +81 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_request.py +143 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_tag_mining_analysis_task_shrink_request.py +97 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_request.py +593 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_response_body.py +103 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_analysis_task_shrink_request.py +202 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_request.py +148 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_response_body.py +104 -0
- alibabacloud_quanmiaolightapp20240801/models/_submit_video_detect_shot_task_shrink_request.py +146 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_config_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_task_response_body.py +119 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_request.py +45 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_response_body.py +136 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_analysis_tasks_shrink_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_request.py +34 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_config_response_body.py +66 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_request.py +43 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_response.py +54 -0
- alibabacloud_quanmiaolightapp20240801/models/_update_video_detect_shot_task_response_body.py +120 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/METADATA +7 -7
- alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info/RECORD +146 -0
- alibabacloud_quanmiaolightapp20240801/models.py +0 -16578
- alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info/RECORD +0 -8
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/LICENSE +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/WHEEL +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.13.2.dist-info → alibabacloud_quanmiaolightapp20240801-2.13.3.dist-info}/top_level.txt +0 -0
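The most structural change in this release is the model layout: the monolithic alibabacloud_quanmiaolightapp20240801/models.py is removed (-16578 lines) and each model now lives in its own module under models/, with models/__init__.py (+691 lines) presumably re-exporting them. A minimal sketch of what that implies for callers, assuming the re-exports keep the usual aliased import working (the generated modules themselves do `from alibabacloud_quanmiaolightapp20240801 import models as main_models`, so they rely on exactly this):

# Sketch only: assumes models/__init__.py re-exports the per-model classes,
# which the generated code's own "import models as main_models" depends on.
from alibabacloud_quanmiaolightapp20240801 import models as main_models

body = main_models.HotNewsRecommendResponseBody()  # now defined in models/_hot_news_recommend_response_body.py
print(type(body).__module__)                       # shows which module actually provides the class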
alibabacloud_quanmiaolightapp20240801/models/_hot_news_recommend_response_body.py
ADDED
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+# This file is auto-generated, don't edit it. Thanks.
+from __future__ import annotations
+
+from typing import List
+
+from alibabacloud_quanmiaolightapp20240801 import models as main_models
+from darabonba.model import DaraModel
+
+class HotNewsRecommendResponseBody(DaraModel):
+    def __init__(
+        self,
+        code: str = None,
+        data: main_models.HotNewsRecommendResponseBodyData = None,
+        message: str = None,
+        request_id: str = None,
+        success: bool = None,
+    ):
+        self.code = code
+        self.data = data
+        self.message = message
+        self.request_id = request_id
+        self.success = success
+
+    def validate(self):
+        if self.data:
+            self.data.validate()
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.code is not None:
+            result['code'] = self.code
+
+        if self.data is not None:
+            result['data'] = self.data.to_map()
+
+        if self.message is not None:
+            result['message'] = self.message
+
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+
+        if self.success is not None:
+            result['success'] = self.success
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('code') is not None:
+            self.code = m.get('code')
+
+        if m.get('data') is not None:
+            temp_model = main_models.HotNewsRecommendResponseBodyData()
+            self.data = temp_model.from_map(m.get('data'))
+
+        if m.get('message') is not None:
+            self.message = m.get('message')
+
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+
+        if m.get('success') is not None:
+            self.success = m.get('success')
+
+        return self
+
+class HotNewsRecommendResponseBodyData(DaraModel):
+    def __init__(
+        self,
+        news: List[main_models.HotNewsRecommendResponseBodyDataNews] = None,
+    ):
+        self.news = news
+
+    def validate(self):
+        if self.news:
+            for v1 in self.news:
+                if v1:
+                    v1.validate()
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        result['news'] = []
+        if self.news is not None:
+            for k1 in self.news:
+                result['news'].append(k1.to_map() if k1 else None)
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.news = []
+        if m.get('news') is not None:
+            for k1 in m.get('news'):
+                temp_model = main_models.HotNewsRecommendResponseBodyDataNews()
+                self.news.append(temp_model.from_map(k1))
+
+        return self
+
+class HotNewsRecommendResponseBodyDataNews(DaraModel):
+    def __init__(
+        self,
+        content: str = None,
+        image_urls: List[str] = None,
+        pub_time: str = None,
+        search_source: str = None,
+        source: str = None,
+        title: str = None,
+        url: str = None,
+    ):
+        self.content = content
+        self.image_urls = image_urls
+        self.pub_time = pub_time
+        self.search_source = search_source
+        self.source = source
+        self.title = title
+        self.url = url
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.content is not None:
+            result['content'] = self.content
+
+        if self.image_urls is not None:
+            result['imageUrls'] = self.image_urls
+
+        if self.pub_time is not None:
+            result['pubTime'] = self.pub_time
+
+        if self.search_source is not None:
+            result['searchSource'] = self.search_source
+
+        if self.source is not None:
+            result['source'] = self.source
+
+        if self.title is not None:
+            result['title'] = self.title
+
+        if self.url is not None:
+            result['url'] = self.url
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('content') is not None:
+            self.content = m.get('content')
+
+        if m.get('imageUrls') is not None:
+            self.image_urls = m.get('imageUrls')
+
+        if m.get('pubTime') is not None:
+            self.pub_time = m.get('pubTime')
+
+        if m.get('searchSource') is not None:
+            self.search_source = m.get('searchSource')
+
+        if m.get('source') is not None:
+            self.source = m.get('source')
+
+        if m.get('title') is not None:
+            self.title = m.get('title')
+
+        if m.get('url') is not None:
+            self.url = m.get('url')
+
+        return self
+
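Like the other generated models in this release, HotNewsRecommendResponseBody follows the darabonba to_map/from_map pattern: camelCase wire keys (requestId, pubTime, imageUrls) map onto snake_case attributes, and nested objects are rebuilt through their own model classes. A small round-trip sketch, with an invented payload for illustration:

# Illustration only: the payload values are made up, not taken from the API.
from alibabacloud_quanmiaolightapp20240801 import models as main_models

payload = {
    'code': 'Ok',
    'requestId': 'req-123',
    'success': True,
    'data': {
        'news': [
            {'title': 'Example headline', 'url': 'https://example.com/a', 'pubTime': '2024-08-01'},
        ],
    },
}

body = main_models.HotNewsRecommendResponseBody().from_map(payload)
print(body.request_id)                  # 'req-123' (wire key 'requestId')
print(body.data.news[0].title)          # 'Example headline'
assert body.to_map()['data']['news'][0]['url'] == 'https://example.com/a'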
alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_request.py
ADDED
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# This file is auto-generated, don't edit it. Thanks.
+from __future__ import annotations
+
+from darabonba.model import DaraModel
+
+class ListAnalysisTagDetailByTaskIdRequest(DaraModel):
+    def __init__(
+        self,
+        max_results: int = None,
+        next_token: str = None,
+        task_id: str = None,
+    ):
+        self.max_results = max_results
+        self.next_token = next_token
+        # This parameter is required.
+        self.task_id = task_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.max_results is not None:
+            result['maxResults'] = self.max_results
+
+        if self.next_token is not None:
+            result['nextToken'] = self.next_token
+
+        if self.task_id is not None:
+            result['taskId'] = self.task_id
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('maxResults') is not None:
+            self.max_results = m.get('maxResults')
+
+        if m.get('nextToken') is not None:
+            self.next_token = m.get('nextToken')
+
+        if m.get('taskId') is not None:
+            self.task_id = m.get('taskId')
+
+        return self
+
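The request model pairs a required taskId (per the generated comment) with the maxResults/nextToken pagination fields. A construction sketch with placeholder values:

# Placeholder values; to_map() only emits fields that are not None.
from alibabacloud_quanmiaolightapp20240801 import models as main_models

request = main_models.ListAnalysisTagDetailByTaskIdRequest(
    task_id='task-id-placeholder',   # required
    max_results=20,                  # page size
    next_token=None,                 # set from a previous page's nextToken to continue
)
print(request.to_map())              # e.g. {'maxResults': 20, 'taskId': 'task-id-placeholder'}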
alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response.py
ADDED
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# This file is auto-generated, don't edit it. Thanks.
+from __future__ import annotations
+
+from typing import Dict
+
+from alibabacloud_quanmiaolightapp20240801 import models as main_models
+from darabonba.model import DaraModel
+
+class ListAnalysisTagDetailByTaskIdResponse(DaraModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: main_models.ListAnalysisTagDetailByTaskIdResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.headers is not None:
+            result['headers'] = self.headers
+
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+
+        if m.get('body') is not None:
+            temp_model = main_models.ListAnalysisTagDetailByTaskIdResponseBody()
+            self.body = temp_model.from_map(m.get('body'))
+
+        return self
+
alibabacloud_quanmiaolightapp20240801/models/_list_analysis_tag_detail_by_task_id_response_body.py
ADDED
@@ -0,0 +1,196 @@
+# -*- coding: utf-8 -*-
+# This file is auto-generated, don't edit it. Thanks.
+from __future__ import annotations
+
+from typing import List
+
+from alibabacloud_quanmiaolightapp20240801 import models as main_models
+from darabonba.model import DaraModel
+
+class ListAnalysisTagDetailByTaskIdResponseBody(DaraModel):
+    def __init__(
+        self,
+        code: str = None,
+        data: List[main_models.ListAnalysisTagDetailByTaskIdResponseBodyData] = None,
+        max_results: int = None,
+        message: str = None,
+        next_token: str = None,
+        request_id: str = None,
+        success: bool = None,
+        total_count: int = None,
+    ):
+        self.code = code
+        self.data = data
+        self.max_results = max_results
+        self.message = message
+        # This parameter is required.
+        self.next_token = next_token
+        # Id of the request
+        self.request_id = request_id
+        self.success = success
+        self.total_count = total_count
+
+    def validate(self):
+        if self.data:
+            for v1 in self.data:
+                if v1:
+                    v1.validate()
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.code is not None:
+            result['code'] = self.code
+
+        result['data'] = []
+        if self.data is not None:
+            for k1 in self.data:
+                result['data'].append(k1.to_map() if k1 else None)
+
+        if self.max_results is not None:
+            result['maxResults'] = self.max_results
+
+        if self.message is not None:
+            result['message'] = self.message
+
+        if self.next_token is not None:
+            result['nextToken'] = self.next_token
+
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+
+        if self.success is not None:
+            result['success'] = self.success
+
+        if self.total_count is not None:
+            result['totalCount'] = self.total_count
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('code') is not None:
+            self.code = m.get('code')
+
+        self.data = []
+        if m.get('data') is not None:
+            for k1 in m.get('data'):
+                temp_model = main_models.ListAnalysisTagDetailByTaskIdResponseBodyData()
+                self.data.append(temp_model.from_map(k1))
+
+        if m.get('maxResults') is not None:
+            self.max_results = m.get('maxResults')
+
+        if m.get('message') is not None:
+            self.message = m.get('message')
+
+        if m.get('nextToken') is not None:
+            self.next_token = m.get('nextToken')
+
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+
+        if m.get('success') is not None:
+            self.success = m.get('success')
+
+        if m.get('totalCount') is not None:
+            self.total_count = m.get('totalCount')
+
+        return self
+
+class ListAnalysisTagDetailByTaskIdResponseBodyData(DaraModel):
+    def __init__(
+        self,
+        content: str = None,
+        content_tags: List[main_models.ListAnalysisTagDetailByTaskIdResponseBodyDataContentTags] = None,
+        origin_response: str = None,
+        source_list: List[str] = None,
+    ):
+        self.content = content
+        self.content_tags = content_tags
+        self.origin_response = origin_response
+        self.source_list = source_list
+
+    def validate(self):
+        if self.content_tags:
+            for v1 in self.content_tags:
+                if v1:
+                    v1.validate()
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.content is not None:
+            result['content'] = self.content
+
+        result['contentTags'] = []
+        if self.content_tags is not None:
+            for k1 in self.content_tags:
+                result['contentTags'].append(k1.to_map() if k1 else None)
+
+        if self.origin_response is not None:
+            result['originResponse'] = self.origin_response
+
+        if self.source_list is not None:
+            result['sourceList'] = self.source_list
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('content') is not None:
+            self.content = m.get('content')
+
+        self.content_tags = []
+        if m.get('contentTags') is not None:
+            for k1 in m.get('contentTags'):
+                temp_model = main_models.ListAnalysisTagDetailByTaskIdResponseBodyDataContentTags()
+                self.content_tags.append(temp_model.from_map(k1))
+
+        if m.get('originResponse') is not None:
+            self.origin_response = m.get('originResponse')
+
+        if m.get('sourceList') is not None:
+            self.source_list = m.get('sourceList')
+
+        return self
+
+class ListAnalysisTagDetailByTaskIdResponseBodyDataContentTags(DaraModel):
+    def __init__(
+        self,
+        tag_name: str = None,
+        tags: List[str] = None,
+    ):
+        self.tag_name = tag_name
+        self.tags = tags
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.tag_name is not None:
+            result['tagName'] = self.tag_name
+
+        if self.tags is not None:
+            result['tags'] = self.tags
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('tagName') is not None:
+            self.tag_name = m.get('tagName')
+
+        if m.get('tags') is not None:
+            self.tags = m.get('tags')
+
+        return self
+
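The response body nests a list of data items, each carrying contentTags grouped by tagName, alongside the nextToken/maxResults/totalCount paging fields. A deserialization sketch with an invented payload:

# Invented payload, for illustrating the nested structure only.
from alibabacloud_quanmiaolightapp20240801 import models as main_models

payload = {
    'requestId': 'req-456',
    'totalCount': 1,
    'nextToken': 'next-page-token',
    'data': [
        {
            'content': 'analysed text',
            'sourceList': ['doc-1'],
            'contentTags': [
                {'tagName': 'sentiment', 'tags': ['positive']},
            ],
        },
    ],
}

body = main_models.ListAnalysisTagDetailByTaskIdResponseBody().from_map(payload)
for item in body.data:
    for tag in item.content_tags:
        print(tag.tag_name, tag.tags)    # sentiment ['positive']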
alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_request.py
ADDED
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+# This file is auto-generated, don't edit it. Thanks.
+from __future__ import annotations
+
+from darabonba.model import DaraModel
+
+class ListHotTopicSummariesRequest(DaraModel):
+    def __init__(
+        self,
+        category: str = None,
+        hot_topic: str = None,
+        hot_topic_version: str = None,
+        max_results: int = None,
+        next_token: str = None,
+    ):
+        self.category = category
+        self.hot_topic = hot_topic
+        self.hot_topic_version = hot_topic_version
+        self.max_results = max_results
+        self.next_token = next_token
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.category is not None:
+            result['category'] = self.category
+
+        if self.hot_topic is not None:
+            result['hotTopic'] = self.hot_topic
+
+        if self.hot_topic_version is not None:
+            result['hotTopicVersion'] = self.hot_topic_version
+
+        if self.max_results is not None:
+            result['maxResults'] = self.max_results
+
+        if self.next_token is not None:
+            result['nextToken'] = self.next_token
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('category') is not None:
+            self.category = m.get('category')
+
+        if m.get('hotTopic') is not None:
+            self.hot_topic = m.get('hotTopic')
+
+        if m.get('hotTopicVersion') is not None:
+            self.hot_topic_version = m.get('hotTopicVersion')
+
+        if m.get('maxResults') is not None:
+            self.max_results = m.get('maxResults')
+
+        if m.get('nextToken') is not None:
+            self.next_token = m.get('nextToken')
+
+        return self
+
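This request combines topic filters (category, hotTopic, hotTopicVersion) with the same maxResults/nextToken pagination pair used elsewhere. A quick serialization round trip, with placeholder filter values:

# Placeholder filter values; only illustrating the camelCase wire keys.
from alibabacloud_quanmiaolightapp20240801 import models as main_models

request = main_models.ListHotTopicSummariesRequest(
    category='example-category',
    hot_topic_version='example-version',
    max_results=10,
)
wire = request.to_map()                  # {'category': ..., 'hotTopicVersion': ..., 'maxResults': 10}
restored = main_models.ListHotTopicSummariesRequest().from_map(wire)
assert restored.hot_topic_version == 'example-version'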
alibabacloud_quanmiaolightapp20240801/models/_list_hot_topic_summaries_response.py
ADDED
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# This file is auto-generated, don't edit it. Thanks.
+from __future__ import annotations
+
+from typing import Dict
+
+from alibabacloud_quanmiaolightapp20240801 import models as main_models
+from darabonba.model import DaraModel
+
+class ListHotTopicSummariesResponse(DaraModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: main_models.ListHotTopicSummariesResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        result = dict()
+        _map = super().to_map()
+        if _map is not None:
+            result = _map
+        if self.headers is not None:
+            result['headers'] = self.headers
+
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+
+        if m.get('body') is not None:
+            temp_model = main_models.ListHotTopicSummariesResponseBody()
+            self.body = temp_model.from_map(m.get('body'))
+
+        return self
+
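Both *Response models shown in this excerpt (and, judging by the file list, the other +54-line *Response modules) wrap a typed body together with the HTTP headers and statusCode. A sketch of rebuilding the wrapper from a dict; the envelope values are invented, and the body fields themselves live in ListHotTopicSummariesResponseBody, which is outside this excerpt:

# Invented envelope; from_map() instantiates the typed body even when it is empty.
from alibabacloud_quanmiaolightapp20240801 import models as main_models

envelope = {
    'headers': {'x-acs-request-id': 'req-789'},
    'statusCode': 200,
    'body': {},
}

response = main_models.ListHotTopicSummariesResponse().from_map(envelope)
print(response.status_code)             # 200
print(type(response.body).__name__)     # ListHotTopicSummariesResponseBody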