lockss-pyclient 0.1.0.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lockss/pyclient/__init__.py +67 -0
- lockss/pyclient/config/__init__.py +42 -0
- lockss/pyclient/config/api/__init__.py +12 -0
- lockss/pyclient/config/api/aus_api.py +2195 -0
- lockss/pyclient/config/api/config_api.py +718 -0
- lockss/pyclient/config/api/plugins_api.py +128 -0
- lockss/pyclient/config/api/status_api.py +120 -0
- lockss/pyclient/config/api/tdb_api.py +318 -0
- lockss/pyclient/config/api/users_api.py +516 -0
- lockss/pyclient/config/api/utils_api.py +128 -0
- lockss/pyclient/config/api_client.py +632 -0
- lockss/pyclient/config/configuration.py +254 -0
- lockss/pyclient/config/models/__init__.py +30 -0
- lockss/pyclient/config/models/api_status.py +344 -0
- lockss/pyclient/config/models/au_configuration.py +142 -0
- lockss/pyclient/config/models/au_status.py +113 -0
- lockss/pyclient/config/models/au_ws_result.py +113 -0
- lockss/pyclient/config/models/auids_body.py +168 -0
- lockss/pyclient/config/models/check_substance_result.py +212 -0
- lockss/pyclient/config/models/content_configuration_result.py +200 -0
- lockss/pyclient/config/models/file_section_name_body.py +113 -0
- lockss/pyclient/config/models/platform_configuration_ws_result.py +113 -0
- lockss/pyclient/config/models/plugin_ws_result.py +345 -0
- lockss/pyclient/config/models/request_au_control_result.py +171 -0
- lockss/pyclient/config/models/tdb_au_ws_result.py +360 -0
- lockss/pyclient/config/models/tdb_publisher_ws_result.py +113 -0
- lockss/pyclient/config/models/tdb_title_ws_result.py +390 -0
- lockss/pyclient/config/rest.py +317 -0
- lockss/pyclient/crawler/__init__.py +45 -0
- lockss/pyclient/crawler/api/__init__.py +10 -0
- lockss/pyclient/crawler/api/crawlers_api.py +215 -0
- lockss/pyclient/crawler/api/crawls_api.py +952 -0
- lockss/pyclient/crawler/api/jobs_api.py +504 -0
- lockss/pyclient/crawler/api/status_api.py +120 -0
- lockss/pyclient/crawler/api/ws_api.py +128 -0
- lockss/pyclient/crawler/api_client.py +632 -0
- lockss/pyclient/crawler/configuration.py +254 -0
- lockss/pyclient/crawler/models/__init__.py +35 -0
- lockss/pyclient/crawler/models/api_status.py +344 -0
- lockss/pyclient/crawler/models/counter.py +142 -0
- lockss/pyclient/crawler/models/crawl_desc.py +344 -0
- lockss/pyclient/crawler/models/crawl_job.py +280 -0
- lockss/pyclient/crawler/models/crawl_pager.py +140 -0
- lockss/pyclient/crawler/models/crawl_status.py +780 -0
- lockss/pyclient/crawler/models/crawl_ws_result.py +814 -0
- lockss/pyclient/crawler/models/crawl_ws_result_pages_with_errors.py +162 -0
- lockss/pyclient/crawler/models/crawler_config.py +142 -0
- lockss/pyclient/crawler/models/crawler_status.py +279 -0
- lockss/pyclient/crawler/models/crawler_statuses.py +112 -0
- lockss/pyclient/crawler/models/error_result.py +164 -0
- lockss/pyclient/crawler/models/job_pager.py +140 -0
- lockss/pyclient/crawler/models/job_status.py +147 -0
- lockss/pyclient/crawler/models/mime_counter.py +169 -0
- lockss/pyclient/crawler/models/page_info.py +228 -0
- lockss/pyclient/crawler/models/url_error.py +148 -0
- lockss/pyclient/crawler/models/url_info.py +167 -0
- lockss/pyclient/crawler/models/url_pager.py +140 -0
- lockss/pyclient/crawler/rest.py +317 -0
- lockss/pyclient/md/__init__.py +36 -0
- lockss/pyclient/md/api/__init__.py +9 -0
- lockss/pyclient/md/api/mdupdates_api.py +508 -0
- lockss/pyclient/md/api/metadata_api.py +136 -0
- lockss/pyclient/md/api/status_api.py +120 -0
- lockss/pyclient/md/api/urls_api.py +224 -0
- lockss/pyclient/md/api_client.py +632 -0
- lockss/pyclient/md/configuration.py +254 -0
- lockss/pyclient/md/models/__init__.py +27 -0
- lockss/pyclient/md/models/api_status.py +344 -0
- lockss/pyclient/md/models/au.py +169 -0
- lockss/pyclient/md/models/au_metadata_page_info.py +140 -0
- lockss/pyclient/md/models/error_result.py +164 -0
- lockss/pyclient/md/models/item_metadata.py +196 -0
- lockss/pyclient/md/models/job.py +280 -0
- lockss/pyclient/md/models/job_page_info.py +140 -0
- lockss/pyclient/md/models/metadata_update_spec.py +142 -0
- lockss/pyclient/md/models/page_info.py +228 -0
- lockss/pyclient/md/models/status.py +142 -0
- lockss/pyclient/md/models/url_info.py +142 -0
- lockss/pyclient/md/rest.py +317 -0
- lockss/pyclient/poller/__init__.py +54 -0
- lockss/pyclient/poller/api/__init__.py +13 -0
- lockss/pyclient/poller/api/export_api.py +156 -0
- lockss/pyclient/poller/api/hash_api.py +413 -0
- lockss/pyclient/poller/api/import_api.py +157 -0
- lockss/pyclient/poller/api/poll_detail_api.py +374 -0
- lockss/pyclient/poller/api/poller_polls_api.py +223 -0
- lockss/pyclient/poller/api/repo_api.py +223 -0
- lockss/pyclient/poller/api/service_api.py +694 -0
- lockss/pyclient/poller/api/voter_polls_api.py +223 -0
- lockss/pyclient/poller/api_client.py +632 -0
- lockss/pyclient/poller/configuration.py +254 -0
- lockss/pyclient/poller/models/__init__.py +41 -0
- lockss/pyclient/poller/models/api_status.py +344 -0
- lockss/pyclient/poller/models/aus_import_body.py +199 -0
- lockss/pyclient/poller/models/cached_uri_set_spec.py +169 -0
- lockss/pyclient/poller/models/error_result.py +164 -0
- lockss/pyclient/poller/models/hasher_ws_params.py +432 -0
- lockss/pyclient/poller/models/link_desc.py +141 -0
- lockss/pyclient/poller/models/page_desc.py +227 -0
- lockss/pyclient/poller/models/peer_data.py +638 -0
- lockss/pyclient/poller/models/peer_ws_result.py +113 -0
- lockss/pyclient/poller/models/poll_desc.py +285 -0
- lockss/pyclient/poller/models/poll_ws_result.py +142 -0
- lockss/pyclient/poller/models/poller_detail.py +613 -0
- lockss/pyclient/poller/models/poller_pager.py +139 -0
- lockss/pyclient/poller/models/poller_summary.py +452 -0
- lockss/pyclient/poller/models/repair_data.py +176 -0
- lockss/pyclient/poller/models/repair_pager.py +139 -0
- lockss/pyclient/poller/models/repair_queue.py +249 -0
- lockss/pyclient/poller/models/repository_space_ws_result.py +113 -0
- lockss/pyclient/poller/models/repository_ws_result.py +113 -0
- lockss/pyclient/poller/models/tally_data.py +471 -0
- lockss/pyclient/poller/models/url_pager.py +139 -0
- lockss/pyclient/poller/models/vote_ws_result.py +142 -0
- lockss/pyclient/poller/models/voter_detail.py +701 -0
- lockss/pyclient/poller/models/voter_pager.py +139 -0
- lockss/pyclient/poller/models/voter_summary.py +284 -0
- lockss/pyclient/poller/rest.py +317 -0
- lockss/pyclient/rs/__init__.py +41 -0
- lockss/pyclient/rs/api/__init__.py +10 -0
- lockss/pyclient/rs/api/artifacts_api.py +988 -0
- lockss/pyclient/rs/api/aus_api.py +334 -0
- lockss/pyclient/rs/api/repo_api.py +379 -0
- lockss/pyclient/rs/api/status_api.py +120 -0
- lockss/pyclient/rs/api/wayback_api.py +386 -0
- lockss/pyclient/rs/api_client.py +632 -0
- lockss/pyclient/rs/configuration.py +247 -0
- lockss/pyclient/rs/models/__init__.py +31 -0
- lockss/pyclient/rs/models/api_status.py +344 -0
- lockss/pyclient/rs/models/archives_body.py +142 -0
- lockss/pyclient/rs/models/artifact.py +344 -0
- lockss/pyclient/rs/models/artifact_page_info.py +140 -0
- lockss/pyclient/rs/models/artifact_properties.py +344 -0
- lockss/pyclient/rs/models/artifacts_body.py +170 -0
- lockss/pyclient/rs/models/au_size.py +162 -0
- lockss/pyclient/rs/models/auid_page_info.py +140 -0
- lockss/pyclient/rs/models/error_result.py +164 -0
- lockss/pyclient/rs/models/import_status.py +298 -0
- lockss/pyclient/rs/models/page_info.py +229 -0
- lockss/pyclient/rs/models/repository_info.py +164 -0
- lockss/pyclient/rs/models/repository_statistics.py +112 -0
- lockss/pyclient/rs/models/storage_info.py +287 -0
- lockss/pyclient/rs/models/streaming_response_body.py +84 -0
- lockss/pyclient/rs/rest.py +317 -0
- lockss_pyclient-0.1.0.dev1.dist-info/LICENSE +27 -0
- lockss_pyclient-0.1.0.dev1.dist-info/METADATA +29 -0
- lockss_pyclient-0.1.0.dev1.dist-info/RECORD +148 -0
- lockss_pyclient-0.1.0.dev1.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,780 @@
|
|
|
1
|
+
# coding: utf-8
|
|
2
|
+
|
|
3
|
+
"""
|
|
4
|
+
LOCKSS Crawler Service REST API
|
|
5
|
+
|
|
6
|
+
REST API of the LOCKSS Crawler Service # noqa: E501
|
|
7
|
+
|
|
8
|
+
OpenAPI spec version: 2.0.0
|
|
9
|
+
Contact: lockss-support@lockss.org
|
|
10
|
+
Generated by: https://github.com/swagger-api/swagger-codegen.git
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import pprint
|
|
14
|
+
import re # noqa: F401
|
|
15
|
+
|
|
16
|
+
import six
|
|
17
|
+
|
|
18
|
+
def _make_crawl_status_property(name, required):
    """Build the accessor property for one CrawlStatus attribute.

    The getter returns the private ``_<name>`` slot.  The setter stores the
    value, raising ``ValueError`` when *required* is true and the value is
    ``None`` — this mirrors, attribute for attribute, the setters emitted by
    swagger-codegen (including the exact error message text).
    """
    def _get(self):
        return getattr(self, '_' + name)

    def _set(self, value):
        if required and value is None:
            raise ValueError(
                "Invalid value for `%s`, must not be `None`" % name)  # noqa: E501
        setattr(self, '_' + name, value)

    _get.__doc__ = "Gets the %s of this CrawlStatus." % name
    return property(_get, _set)


class CrawlStatus(object):
    """Status of a single crawl, as reported by the LOCKSS Crawler Service.

    NOTE: This class was originally auto generated by the swagger code
    generator program; the hand-rolled per-attribute accessors have been
    collapsed into properties generated from ``swagger_types`` (see the
    module-level loop below the class).  Behavior is unchanged.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """
    swagger_types = {
        'job_id': 'str',                  # The id for the crawl.
        'au_id': 'str',                   # The id for the au.
        'au_name': 'str',                 # The name for the au.
        'type': 'str',                    # The type of crawl.
        'start_urls': 'list[str]',        # The array of start urls.
        'priority': 'int',                # The priority for this crawl.
        'crawler_id': 'str',              # The id of the crawler used.
        'sources': 'list[str]',           # The sources to use for the crawl.
        'depth': 'int',                   # The depth of the crawl.
        'refetch_depth': 'int',           # The refetch depth of the crawl.
        'proxy': 'str',                   # The proxy used for crawling.
        'start_time': 'int',              # Timestamp for the start of crawl.
        'end_time': 'int',                # Timestamp for the end of the crawl.
        'job_status': 'JobStatus',
        'is_waiting': 'bool',             # True if waiting to start.
        'is_active': 'bool',              # True if the crawl is active.
        'is_error': 'bool',               # True if the crawl has errored.
        'bytes_fetched': 'int',           # The number of bytes fetched.
        'fetched_items': 'Counter',
        'excluded_items': 'Counter',
        'not_modified_items': 'Counter',
        'parsed_items': 'Counter',
        'pending_items': 'Counter',
        'errors': 'Counter',
        'mime_types': 'list[MimeCounter]'  # The list of urls by mimeType.
    }

    attribute_map = {
        'job_id': 'jobId',
        'au_id': 'auId',
        'au_name': 'auName',
        'type': 'type',
        'start_urls': 'startUrls',
        'priority': 'priority',
        'crawler_id': 'crawlerId',
        'sources': 'sources',
        'depth': 'depth',
        'refetch_depth': 'refetchDepth',
        'proxy': 'proxy',
        'start_time': 'startTime',
        'end_time': 'endTime',
        'job_status': 'jobStatus',
        'is_waiting': 'isWaiting',
        'is_active': 'isActive',
        'is_error': 'isError',
        'bytes_fetched': 'bytesFetched',
        'fetched_items': 'fetchedItems',
        'excluded_items': 'excludedItems',
        'not_modified_items': 'notModifiedItems',
        'parsed_items': 'parsedItems',
        'pending_items': 'pendingItems',
        'errors': 'errors',
        'mime_types': 'mimeTypes'
    }

    # Attributes whose setters reject ``None`` (required by the API schema).
    _required = frozenset([
        'job_id', 'au_id', 'au_name', 'type', 'start_urls', 'priority',
        'crawler_id', 'start_time', 'end_time', 'job_status',
    ])

    def __init__(self, job_id=None, au_id=None, au_name=None, type=None,
                 start_urls=None, priority=None, crawler_id='classic',
                 sources=None, depth=None, refetch_depth=None, proxy=None,
                 start_time=None, end_time=None, job_status=None,
                 is_waiting=None, is_active=None, is_error=None,
                 bytes_fetched=None, fetched_items=None, excluded_items=None,
                 not_modified_items=None, parsed_items=None,
                 pending_items=None, errors=None, mime_types=None):  # noqa: E501
        """CrawlStatus - a model defined in Swagger.

        Required attributes are assigned unconditionally, so passing
        ``None`` for any of them raises ``ValueError`` (from the property
        setter).  Optional attributes are only assigned when a value was
        supplied, leaving the private slot at ``None`` otherwise.  The
        assignment order below matches the generated original, so the
        first missing required value is the one reported.
        """
        for attr in self.swagger_types:
            setattr(self, '_' + attr, None)
        self.discriminator = None
        # Required (setter raises on None):
        self.job_id = job_id
        self.au_id = au_id
        self.au_name = au_name
        self.type = type
        self.start_urls = start_urls
        self.priority = priority
        self.crawler_id = crawler_id
        # Optional, assigned only when provided:
        for name, value in (('sources', sources), ('depth', depth),
                            ('refetch_depth', refetch_depth),
                            ('proxy', proxy)):
            if value is not None:
                setattr(self, name, value)
        # Required (setter raises on None):
        self.start_time = start_time
        self.end_time = end_time
        self.job_status = job_status
        # Optional, assigned only when provided:
        for name, value in (
                ('is_waiting', is_waiting), ('is_active', is_active),
                ('is_error', is_error), ('bytes_fetched', bytes_fetched),
                ('fetched_items', fetched_items),
                ('excluded_items', excluded_items),
                ('not_modified_items', not_modified_items),
                ('parsed_items', parsed_items),
                ('pending_items', pending_items),
                ('errors', errors), ('mime_types', mime_types)):
            if value is not None:
                setattr(self, name, value)

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested model objects (anything with a ``to_dict``) are recursively
        converted, both directly and inside lists and dicts.
        """
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, "to_dict") else x
                                for x in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()}
            else:
                result[attr] = value
        # Retained from the generated original: merge dict items if this
        # model is ever made a dict subclass.
        if issubclass(CrawlStatus, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CrawlStatus):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other


# Attach one accessor property per swagger attribute; required attributes
# get a setter that rejects ``None``, exactly like the generated originals.
for _attr in CrawlStatus.swagger_types:
    setattr(CrawlStatus, _attr,
            _make_crawl_status_property(_attr, _attr in CrawlStatus._required))
del _attr
|