lockss-pyclient 0.1.0.dev1 (lockss_pyclient-0.1.0.dev1-py3-none-any.whl)
This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
- lockss/pyclient/__init__.py +67 -0
- lockss/pyclient/config/__init__.py +42 -0
- lockss/pyclient/config/api/__init__.py +12 -0
- lockss/pyclient/config/api/aus_api.py +2195 -0
- lockss/pyclient/config/api/config_api.py +718 -0
- lockss/pyclient/config/api/plugins_api.py +128 -0
- lockss/pyclient/config/api/status_api.py +120 -0
- lockss/pyclient/config/api/tdb_api.py +318 -0
- lockss/pyclient/config/api/users_api.py +516 -0
- lockss/pyclient/config/api/utils_api.py +128 -0
- lockss/pyclient/config/api_client.py +632 -0
- lockss/pyclient/config/configuration.py +254 -0
- lockss/pyclient/config/models/__init__.py +30 -0
- lockss/pyclient/config/models/api_status.py +344 -0
- lockss/pyclient/config/models/au_configuration.py +142 -0
- lockss/pyclient/config/models/au_status.py +113 -0
- lockss/pyclient/config/models/au_ws_result.py +113 -0
- lockss/pyclient/config/models/auids_body.py +168 -0
- lockss/pyclient/config/models/check_substance_result.py +212 -0
- lockss/pyclient/config/models/content_configuration_result.py +200 -0
- lockss/pyclient/config/models/file_section_name_body.py +113 -0
- lockss/pyclient/config/models/platform_configuration_ws_result.py +113 -0
- lockss/pyclient/config/models/plugin_ws_result.py +345 -0
- lockss/pyclient/config/models/request_au_control_result.py +171 -0
- lockss/pyclient/config/models/tdb_au_ws_result.py +360 -0
- lockss/pyclient/config/models/tdb_publisher_ws_result.py +113 -0
- lockss/pyclient/config/models/tdb_title_ws_result.py +390 -0
- lockss/pyclient/config/rest.py +317 -0
- lockss/pyclient/crawler/__init__.py +45 -0
- lockss/pyclient/crawler/api/__init__.py +10 -0
- lockss/pyclient/crawler/api/crawlers_api.py +215 -0
- lockss/pyclient/crawler/api/crawls_api.py +952 -0
- lockss/pyclient/crawler/api/jobs_api.py +504 -0
- lockss/pyclient/crawler/api/status_api.py +120 -0
- lockss/pyclient/crawler/api/ws_api.py +128 -0
- lockss/pyclient/crawler/api_client.py +632 -0
- lockss/pyclient/crawler/configuration.py +254 -0
- lockss/pyclient/crawler/models/__init__.py +35 -0
- lockss/pyclient/crawler/models/api_status.py +344 -0
- lockss/pyclient/crawler/models/counter.py +142 -0
- lockss/pyclient/crawler/models/crawl_desc.py +344 -0
- lockss/pyclient/crawler/models/crawl_job.py +280 -0
- lockss/pyclient/crawler/models/crawl_pager.py +140 -0
- lockss/pyclient/crawler/models/crawl_status.py +780 -0
- lockss/pyclient/crawler/models/crawl_ws_result.py +814 -0
- lockss/pyclient/crawler/models/crawl_ws_result_pages_with_errors.py +162 -0
- lockss/pyclient/crawler/models/crawler_config.py +142 -0
- lockss/pyclient/crawler/models/crawler_status.py +279 -0
- lockss/pyclient/crawler/models/crawler_statuses.py +112 -0
- lockss/pyclient/crawler/models/error_result.py +164 -0
- lockss/pyclient/crawler/models/job_pager.py +140 -0
- lockss/pyclient/crawler/models/job_status.py +147 -0
- lockss/pyclient/crawler/models/mime_counter.py +169 -0
- lockss/pyclient/crawler/models/page_info.py +228 -0
- lockss/pyclient/crawler/models/url_error.py +148 -0
- lockss/pyclient/crawler/models/url_info.py +167 -0
- lockss/pyclient/crawler/models/url_pager.py +140 -0
- lockss/pyclient/crawler/rest.py +317 -0
- lockss/pyclient/md/__init__.py +36 -0
- lockss/pyclient/md/api/__init__.py +9 -0
- lockss/pyclient/md/api/mdupdates_api.py +508 -0
- lockss/pyclient/md/api/metadata_api.py +136 -0
- lockss/pyclient/md/api/status_api.py +120 -0
- lockss/pyclient/md/api/urls_api.py +224 -0
- lockss/pyclient/md/api_client.py +632 -0
- lockss/pyclient/md/configuration.py +254 -0
- lockss/pyclient/md/models/__init__.py +27 -0
- lockss/pyclient/md/models/api_status.py +344 -0
- lockss/pyclient/md/models/au.py +169 -0
- lockss/pyclient/md/models/au_metadata_page_info.py +140 -0
- lockss/pyclient/md/models/error_result.py +164 -0
- lockss/pyclient/md/models/item_metadata.py +196 -0
- lockss/pyclient/md/models/job.py +280 -0
- lockss/pyclient/md/models/job_page_info.py +140 -0
- lockss/pyclient/md/models/metadata_update_spec.py +142 -0
- lockss/pyclient/md/models/page_info.py +228 -0
- lockss/pyclient/md/models/status.py +142 -0
- lockss/pyclient/md/models/url_info.py +142 -0
- lockss/pyclient/md/rest.py +317 -0
- lockss/pyclient/poller/__init__.py +54 -0
- lockss/pyclient/poller/api/__init__.py +13 -0
- lockss/pyclient/poller/api/export_api.py +156 -0
- lockss/pyclient/poller/api/hash_api.py +413 -0
- lockss/pyclient/poller/api/import_api.py +157 -0
- lockss/pyclient/poller/api/poll_detail_api.py +374 -0
- lockss/pyclient/poller/api/poller_polls_api.py +223 -0
- lockss/pyclient/poller/api/repo_api.py +223 -0
- lockss/pyclient/poller/api/service_api.py +694 -0
- lockss/pyclient/poller/api/voter_polls_api.py +223 -0
- lockss/pyclient/poller/api_client.py +632 -0
- lockss/pyclient/poller/configuration.py +254 -0
- lockss/pyclient/poller/models/__init__.py +41 -0
- lockss/pyclient/poller/models/api_status.py +344 -0
- lockss/pyclient/poller/models/aus_import_body.py +199 -0
- lockss/pyclient/poller/models/cached_uri_set_spec.py +169 -0
- lockss/pyclient/poller/models/error_result.py +164 -0
- lockss/pyclient/poller/models/hasher_ws_params.py +432 -0
- lockss/pyclient/poller/models/link_desc.py +141 -0
- lockss/pyclient/poller/models/page_desc.py +227 -0
- lockss/pyclient/poller/models/peer_data.py +638 -0
- lockss/pyclient/poller/models/peer_ws_result.py +113 -0
- lockss/pyclient/poller/models/poll_desc.py +285 -0
- lockss/pyclient/poller/models/poll_ws_result.py +142 -0
- lockss/pyclient/poller/models/poller_detail.py +613 -0
- lockss/pyclient/poller/models/poller_pager.py +139 -0
- lockss/pyclient/poller/models/poller_summary.py +452 -0
- lockss/pyclient/poller/models/repair_data.py +176 -0
- lockss/pyclient/poller/models/repair_pager.py +139 -0
- lockss/pyclient/poller/models/repair_queue.py +249 -0
- lockss/pyclient/poller/models/repository_space_ws_result.py +113 -0
- lockss/pyclient/poller/models/repository_ws_result.py +113 -0
- lockss/pyclient/poller/models/tally_data.py +471 -0
- lockss/pyclient/poller/models/url_pager.py +139 -0
- lockss/pyclient/poller/models/vote_ws_result.py +142 -0
- lockss/pyclient/poller/models/voter_detail.py +701 -0
- lockss/pyclient/poller/models/voter_pager.py +139 -0
- lockss/pyclient/poller/models/voter_summary.py +284 -0
- lockss/pyclient/poller/rest.py +317 -0
- lockss/pyclient/rs/__init__.py +41 -0
- lockss/pyclient/rs/api/__init__.py +10 -0
- lockss/pyclient/rs/api/artifacts_api.py +988 -0
- lockss/pyclient/rs/api/aus_api.py +334 -0
- lockss/pyclient/rs/api/repo_api.py +379 -0
- lockss/pyclient/rs/api/status_api.py +120 -0
- lockss/pyclient/rs/api/wayback_api.py +386 -0
- lockss/pyclient/rs/api_client.py +632 -0
- lockss/pyclient/rs/configuration.py +247 -0
- lockss/pyclient/rs/models/__init__.py +31 -0
- lockss/pyclient/rs/models/api_status.py +344 -0
- lockss/pyclient/rs/models/archives_body.py +142 -0
- lockss/pyclient/rs/models/artifact.py +344 -0
- lockss/pyclient/rs/models/artifact_page_info.py +140 -0
- lockss/pyclient/rs/models/artifact_properties.py +344 -0
- lockss/pyclient/rs/models/artifacts_body.py +170 -0
- lockss/pyclient/rs/models/au_size.py +162 -0
- lockss/pyclient/rs/models/auid_page_info.py +140 -0
- lockss/pyclient/rs/models/error_result.py +164 -0
- lockss/pyclient/rs/models/import_status.py +298 -0
- lockss/pyclient/rs/models/page_info.py +229 -0
- lockss/pyclient/rs/models/repository_info.py +164 -0
- lockss/pyclient/rs/models/repository_statistics.py +112 -0
- lockss/pyclient/rs/models/storage_info.py +287 -0
- lockss/pyclient/rs/models/streaming_response_body.py +84 -0
- lockss/pyclient/rs/rest.py +317 -0
- lockss_pyclient-0.1.0.dev1.dist-info/LICENSE +27 -0
- lockss_pyclient-0.1.0.dev1.dist-info/METADATA +29 -0
- lockss_pyclient-0.1.0.dev1.dist-info/RECORD +148 -0
- lockss_pyclient-0.1.0.dev1.dist-info/WHEEL +4 -0
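
The wheel bundles one swagger-codegen 2.x client per LOCKSS REST service (config, crawler, md, poller, rs), each with its own api/ and models/ packages plus api_client.py, configuration.py and rest.py. A minimal sketch of how the crawler client would typically be used, assuming the standard swagger-codegen layout; the host URL, credentials and the specific operation methods are illustrative assumptions, not taken from this diff:

    # Illustrative only: standard swagger-codegen usage pattern. The concrete
    # host, username and password values, and which JobsApi operation to call,
    # are assumptions rather than facts from this diff.
    from lockss.pyclient.crawler import ApiClient, Configuration
    from lockss.pyclient.crawler.api.jobs_api import JobsApi

    config = Configuration()
    config.host = "http://localhost:24660"   # hypothetical crawler service URL
    config.username = "lockss-u"             # hypothetical credentials
    config.password = "lockss-p"

    jobs_api = JobsApi(ApiClient(config))
    # swagger-codegen generates one method per REST operation; see the
    # generated jobs_api.py for the actual operation names and parameters.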
lockss/pyclient/crawler/models/crawl_job.py
@@ -0,0 +1,280 @@
# coding: utf-8

"""
    LOCKSS Crawler Service REST API

    REST API of the LOCKSS Crawler Service  # noqa: E501

    OpenAPI spec version: 2.0.0
    Contact: lockss-support@lockss.org
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six

class CrawlJob(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'crawl_desc': 'CrawlDesc',
        'request_date': 'int',
        'job_id': 'str',
        'job_status': 'JobStatus',
        'start_date': 'int',
        'end_date': 'int',
        'result': 'str'
    }

    attribute_map = {
        'crawl_desc': 'crawlDesc',
        'request_date': 'requestDate',
        'job_id': 'jobId',
        'job_status': 'jobStatus',
        'start_date': 'startDate',
        'end_date': 'endDate',
        'result': 'result'
    }

    def __init__(self, crawl_desc=None, request_date=None, job_id=None, job_status=None, start_date=None, end_date=None, result=None):  # noqa: E501
        """CrawlJob - a model defined in Swagger"""  # noqa: E501
        self._crawl_desc = None
        self._request_date = None
        self._job_id = None
        self._job_status = None
        self._start_date = None
        self._end_date = None
        self._result = None
        self.discriminator = None
        self.crawl_desc = crawl_desc
        self.request_date = request_date
        self.job_id = job_id
        self.job_status = job_status
        if start_date is not None:
            self.start_date = start_date
        if end_date is not None:
            self.end_date = end_date
        if result is not None:
            self.result = result

    @property
    def crawl_desc(self):
        """Gets the crawl_desc of this CrawlJob.  # noqa: E501


        :return: The crawl_desc of this CrawlJob.  # noqa: E501
        :rtype: CrawlDesc
        """
        return self._crawl_desc

    @crawl_desc.setter
    def crawl_desc(self, crawl_desc):
        """Sets the crawl_desc of this CrawlJob.


        :param crawl_desc: The crawl_desc of this CrawlJob.  # noqa: E501
        :type: CrawlDesc
        """
        if crawl_desc is None:
            raise ValueError("Invalid value for `crawl_desc`, must not be `None`")  # noqa: E501

        self._crawl_desc = crawl_desc

    @property
    def request_date(self):
        """Gets the request_date of this CrawlJob.  # noqa: E501

        The timestamp when the crawl was requested.  # noqa: E501

        :return: The request_date of this CrawlJob.  # noqa: E501
        :rtype: int
        """
        return self._request_date

    @request_date.setter
    def request_date(self, request_date):
        """Sets the request_date of this CrawlJob.

        The timestamp when the crawl was requested.  # noqa: E501

        :param request_date: The request_date of this CrawlJob.  # noqa: E501
        :type: int
        """
        if request_date is None:
            raise ValueError("Invalid value for `request_date`, must not be `None`")  # noqa: E501

        self._request_date = request_date

    @property
    def job_id(self):
        """Gets the job_id of this CrawlJob.  # noqa: E501

        Identifier of the crawl job.  # noqa: E501

        :return: The job_id of this CrawlJob.  # noqa: E501
        :rtype: str
        """
        return self._job_id

    @job_id.setter
    def job_id(self, job_id):
        """Sets the job_id of this CrawlJob.

        Identifier of the crawl job.  # noqa: E501

        :param job_id: The job_id of this CrawlJob.  # noqa: E501
        :type: str
        """
        if job_id is None:
            raise ValueError("Invalid value for `job_id`, must not be `None`")  # noqa: E501

        self._job_id = job_id

    @property
    def job_status(self):
        """Gets the job_status of this CrawlJob.  # noqa: E501


        :return: The job_status of this CrawlJob.  # noqa: E501
        :rtype: JobStatus
        """
        return self._job_status

    @job_status.setter
    def job_status(self, job_status):
        """Sets the job_status of this CrawlJob.


        :param job_status: The job_status of this CrawlJob.  # noqa: E501
        :type: JobStatus
        """
        if job_status is None:
            raise ValueError("Invalid value for `job_status`, must not be `None`")  # noqa: E501

        self._job_status = job_status

    @property
    def start_date(self):
        """Gets the start_date of this CrawlJob.  # noqa: E501

        The timestamp when the crawl began.  # noqa: E501

        :return: The start_date of this CrawlJob.  # noqa: E501
        :rtype: int
        """
        return self._start_date

    @start_date.setter
    def start_date(self, start_date):
        """Sets the start_date of this CrawlJob.

        The timestamp when the crawl began.  # noqa: E501

        :param start_date: The start_date of this CrawlJob.  # noqa: E501
        :type: int
        """

        self._start_date = start_date

    @property
    def end_date(self):
        """Gets the end_date of this CrawlJob.  # noqa: E501

        The timestamp when the crawl ended.  # noqa: E501

        :return: The end_date of this CrawlJob.  # noqa: E501
        :rtype: int
        """
        return self._end_date

    @end_date.setter
    def end_date(self, end_date):
        """Sets the end_date of this CrawlJob.

        The timestamp when the crawl ended.  # noqa: E501

        :param end_date: The end_date of this CrawlJob.  # noqa: E501
        :type: int
        """

        self._end_date = end_date

    @property
    def result(self):
        """Gets the result of this CrawlJob.  # noqa: E501

        A URI which can be used to retrieve the crawl data.  # noqa: E501

        :return: The result of this CrawlJob.  # noqa: E501
        :rtype: str
        """
        return self._result

    @result.setter
    def result(self, result):
        """Sets the result of this CrawlJob.

        A URI which can be used to retrieve the crawl data.  # noqa: E501

        :param result: The result of this CrawlJob.  # noqa: E501
        :type: str
        """

        self._result = result

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(CrawlJob, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CrawlJob):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
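
The CrawlJob model above (crawl_job.py) enforces its required fields (crawl_desc, request_date, job_id, job_status) in the property setters invoked from __init__, while start_date, end_date and result remain optional. A small sketch of that behavior; the CrawlDesc and JobStatus values referenced in the commented lines are hypothetical placeholders built from sibling models not shown in this diff:

    from lockss.pyclient.crawler.models.crawl_job import CrawlJob

    # Required fields are validated by the setters called from __init__:
    try:
        CrawlJob()  # all arguments default to None
    except ValueError as e:
        print(e)    # Invalid value for `crawl_desc`, must not be `None`

    # With required fields supplied (desc/status would be CrawlDesc and
    # JobStatus instances, not shown in this diff), the optional timestamps
    # may be omitted and to_dict() recurses into nested models:
    # job = CrawlJob(crawl_desc=desc, request_date=1700000000000,
    #                job_id="42", job_status=status)
    # print(job.to_dict())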
lockss/pyclient/crawler/models/crawl_pager.py
@@ -0,0 +1,140 @@
# coding: utf-8

"""
    LOCKSS Crawler Service REST API

    REST API of the LOCKSS Crawler Service  # noqa: E501

    OpenAPI spec version: 2.0.0
    Contact: lockss-support@lockss.org
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six

class CrawlPager(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'crawls': 'list[CrawlStatus]',
        'page_info': 'PageInfo'
    }

    attribute_map = {
        'crawls': 'crawls',
        'page_info': 'pageInfo'
    }

    def __init__(self, crawls=None, page_info=None):  # noqa: E501
        """CrawlPager - a model defined in Swagger"""  # noqa: E501
        self._crawls = None
        self._page_info = None
        self.discriminator = None
        self.crawls = crawls
        self.page_info = page_info

    @property
    def crawls(self):
        """Gets the crawls of this CrawlPager.  # noqa: E501

        The crawls displayed in the page  # noqa: E501

        :return: The crawls of this CrawlPager.  # noqa: E501
        :rtype: list[CrawlStatus]
        """
        return self._crawls

    @crawls.setter
    def crawls(self, crawls):
        """Sets the crawls of this CrawlPager.

        The crawls displayed in the page  # noqa: E501

        :param crawls: The crawls of this CrawlPager.  # noqa: E501
        :type: list[CrawlStatus]
        """
        if crawls is None:
            raise ValueError("Invalid value for `crawls`, must not be `None`")  # noqa: E501

        self._crawls = crawls

    @property
    def page_info(self):
        """Gets the page_info of this CrawlPager.  # noqa: E501


        :return: The page_info of this CrawlPager.  # noqa: E501
        :rtype: PageInfo
        """
        return self._page_info

    @page_info.setter
    def page_info(self, page_info):
        """Sets the page_info of this CrawlPager.


        :param page_info: The page_info of this CrawlPager.  # noqa: E501
        :type: PageInfo
        """
        if page_info is None:
            raise ValueError("Invalid value for `page_info`, must not be `None`")  # noqa: E501

        self._page_info = page_info

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(CrawlPager, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CrawlPager):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
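
As with the other generated models, CrawlPager.attribute_map records the camelCase JSON key used on the wire (pageInfo), while swagger_types drives to_dict(), which recurses into the nested PageInfo and each CrawlStatus entry. A brief sketch, assuming a pager object already returned by one of the crawls operations (the variable itself is hypothetical):

    # `pager` is assumed to be a CrawlPager returned by a crawls operation
    # (see the generated crawls_api.py); it is not constructed here.
    for crawl in pager.crawls:           # list of CrawlStatus models
        print(crawl.to_dict())           # nested models flatten to dicts

    print(pager.page_info.to_dict())     # paging metadata (PageInfo model)
    print(list(pager.to_dict()))         # python attribute names: crawls, page_info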