lockss_pyclient-0.1.0.dev1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lockss/pyclient/__init__.py +67 -0
- lockss/pyclient/config/__init__.py +42 -0
- lockss/pyclient/config/api/__init__.py +12 -0
- lockss/pyclient/config/api/aus_api.py +2195 -0
- lockss/pyclient/config/api/config_api.py +718 -0
- lockss/pyclient/config/api/plugins_api.py +128 -0
- lockss/pyclient/config/api/status_api.py +120 -0
- lockss/pyclient/config/api/tdb_api.py +318 -0
- lockss/pyclient/config/api/users_api.py +516 -0
- lockss/pyclient/config/api/utils_api.py +128 -0
- lockss/pyclient/config/api_client.py +632 -0
- lockss/pyclient/config/configuration.py +254 -0
- lockss/pyclient/config/models/__init__.py +30 -0
- lockss/pyclient/config/models/api_status.py +344 -0
- lockss/pyclient/config/models/au_configuration.py +142 -0
- lockss/pyclient/config/models/au_status.py +113 -0
- lockss/pyclient/config/models/au_ws_result.py +113 -0
- lockss/pyclient/config/models/auids_body.py +168 -0
- lockss/pyclient/config/models/check_substance_result.py +212 -0
- lockss/pyclient/config/models/content_configuration_result.py +200 -0
- lockss/pyclient/config/models/file_section_name_body.py +113 -0
- lockss/pyclient/config/models/platform_configuration_ws_result.py +113 -0
- lockss/pyclient/config/models/plugin_ws_result.py +345 -0
- lockss/pyclient/config/models/request_au_control_result.py +171 -0
- lockss/pyclient/config/models/tdb_au_ws_result.py +360 -0
- lockss/pyclient/config/models/tdb_publisher_ws_result.py +113 -0
- lockss/pyclient/config/models/tdb_title_ws_result.py +390 -0
- lockss/pyclient/config/rest.py +317 -0
- lockss/pyclient/crawler/__init__.py +45 -0
- lockss/pyclient/crawler/api/__init__.py +10 -0
- lockss/pyclient/crawler/api/crawlers_api.py +215 -0
- lockss/pyclient/crawler/api/crawls_api.py +952 -0
- lockss/pyclient/crawler/api/jobs_api.py +504 -0
- lockss/pyclient/crawler/api/status_api.py +120 -0
- lockss/pyclient/crawler/api/ws_api.py +128 -0
- lockss/pyclient/crawler/api_client.py +632 -0
- lockss/pyclient/crawler/configuration.py +254 -0
- lockss/pyclient/crawler/models/__init__.py +35 -0
- lockss/pyclient/crawler/models/api_status.py +344 -0
- lockss/pyclient/crawler/models/counter.py +142 -0
- lockss/pyclient/crawler/models/crawl_desc.py +344 -0
- lockss/pyclient/crawler/models/crawl_job.py +280 -0
- lockss/pyclient/crawler/models/crawl_pager.py +140 -0
- lockss/pyclient/crawler/models/crawl_status.py +780 -0
- lockss/pyclient/crawler/models/crawl_ws_result.py +814 -0
- lockss/pyclient/crawler/models/crawl_ws_result_pages_with_errors.py +162 -0
- lockss/pyclient/crawler/models/crawler_config.py +142 -0
- lockss/pyclient/crawler/models/crawler_status.py +279 -0
- lockss/pyclient/crawler/models/crawler_statuses.py +112 -0
- lockss/pyclient/crawler/models/error_result.py +164 -0
- lockss/pyclient/crawler/models/job_pager.py +140 -0
- lockss/pyclient/crawler/models/job_status.py +147 -0
- lockss/pyclient/crawler/models/mime_counter.py +169 -0
- lockss/pyclient/crawler/models/page_info.py +228 -0
- lockss/pyclient/crawler/models/url_error.py +148 -0
- lockss/pyclient/crawler/models/url_info.py +167 -0
- lockss/pyclient/crawler/models/url_pager.py +140 -0
- lockss/pyclient/crawler/rest.py +317 -0
- lockss/pyclient/md/__init__.py +36 -0
- lockss/pyclient/md/api/__init__.py +9 -0
- lockss/pyclient/md/api/mdupdates_api.py +508 -0
- lockss/pyclient/md/api/metadata_api.py +136 -0
- lockss/pyclient/md/api/status_api.py +120 -0
- lockss/pyclient/md/api/urls_api.py +224 -0
- lockss/pyclient/md/api_client.py +632 -0
- lockss/pyclient/md/configuration.py +254 -0
- lockss/pyclient/md/models/__init__.py +27 -0
- lockss/pyclient/md/models/api_status.py +344 -0
- lockss/pyclient/md/models/au.py +169 -0
- lockss/pyclient/md/models/au_metadata_page_info.py +140 -0
- lockss/pyclient/md/models/error_result.py +164 -0
- lockss/pyclient/md/models/item_metadata.py +196 -0
- lockss/pyclient/md/models/job.py +280 -0
- lockss/pyclient/md/models/job_page_info.py +140 -0
- lockss/pyclient/md/models/metadata_update_spec.py +142 -0
- lockss/pyclient/md/models/page_info.py +228 -0
- lockss/pyclient/md/models/status.py +142 -0
- lockss/pyclient/md/models/url_info.py +142 -0
- lockss/pyclient/md/rest.py +317 -0
- lockss/pyclient/poller/__init__.py +54 -0
- lockss/pyclient/poller/api/__init__.py +13 -0
- lockss/pyclient/poller/api/export_api.py +156 -0
- lockss/pyclient/poller/api/hash_api.py +413 -0
- lockss/pyclient/poller/api/import_api.py +157 -0
- lockss/pyclient/poller/api/poll_detail_api.py +374 -0
- lockss/pyclient/poller/api/poller_polls_api.py +223 -0
- lockss/pyclient/poller/api/repo_api.py +223 -0
- lockss/pyclient/poller/api/service_api.py +694 -0
- lockss/pyclient/poller/api/voter_polls_api.py +223 -0
- lockss/pyclient/poller/api_client.py +632 -0
- lockss/pyclient/poller/configuration.py +254 -0
- lockss/pyclient/poller/models/__init__.py +41 -0
- lockss/pyclient/poller/models/api_status.py +344 -0
- lockss/pyclient/poller/models/aus_import_body.py +199 -0
- lockss/pyclient/poller/models/cached_uri_set_spec.py +169 -0
- lockss/pyclient/poller/models/error_result.py +164 -0
- lockss/pyclient/poller/models/hasher_ws_params.py +432 -0
- lockss/pyclient/poller/models/link_desc.py +141 -0
- lockss/pyclient/poller/models/page_desc.py +227 -0
- lockss/pyclient/poller/models/peer_data.py +638 -0
- lockss/pyclient/poller/models/peer_ws_result.py +113 -0
- lockss/pyclient/poller/models/poll_desc.py +285 -0
- lockss/pyclient/poller/models/poll_ws_result.py +142 -0
- lockss/pyclient/poller/models/poller_detail.py +613 -0
- lockss/pyclient/poller/models/poller_pager.py +139 -0
- lockss/pyclient/poller/models/poller_summary.py +452 -0
- lockss/pyclient/poller/models/repair_data.py +176 -0
- lockss/pyclient/poller/models/repair_pager.py +139 -0
- lockss/pyclient/poller/models/repair_queue.py +249 -0
- lockss/pyclient/poller/models/repository_space_ws_result.py +113 -0
- lockss/pyclient/poller/models/repository_ws_result.py +113 -0
- lockss/pyclient/poller/models/tally_data.py +471 -0
- lockss/pyclient/poller/models/url_pager.py +139 -0
- lockss/pyclient/poller/models/vote_ws_result.py +142 -0
- lockss/pyclient/poller/models/voter_detail.py +701 -0
- lockss/pyclient/poller/models/voter_pager.py +139 -0
- lockss/pyclient/poller/models/voter_summary.py +284 -0
- lockss/pyclient/poller/rest.py +317 -0
- lockss/pyclient/rs/__init__.py +41 -0
- lockss/pyclient/rs/api/__init__.py +10 -0
- lockss/pyclient/rs/api/artifacts_api.py +988 -0
- lockss/pyclient/rs/api/aus_api.py +334 -0
- lockss/pyclient/rs/api/repo_api.py +379 -0
- lockss/pyclient/rs/api/status_api.py +120 -0
- lockss/pyclient/rs/api/wayback_api.py +386 -0
- lockss/pyclient/rs/api_client.py +632 -0
- lockss/pyclient/rs/configuration.py +247 -0
- lockss/pyclient/rs/models/__init__.py +31 -0
- lockss/pyclient/rs/models/api_status.py +344 -0
- lockss/pyclient/rs/models/archives_body.py +142 -0
- lockss/pyclient/rs/models/artifact.py +344 -0
- lockss/pyclient/rs/models/artifact_page_info.py +140 -0
- lockss/pyclient/rs/models/artifact_properties.py +344 -0
- lockss/pyclient/rs/models/artifacts_body.py +170 -0
- lockss/pyclient/rs/models/au_size.py +162 -0
- lockss/pyclient/rs/models/auid_page_info.py +140 -0
- lockss/pyclient/rs/models/error_result.py +164 -0
- lockss/pyclient/rs/models/import_status.py +298 -0
- lockss/pyclient/rs/models/page_info.py +229 -0
- lockss/pyclient/rs/models/repository_info.py +164 -0
- lockss/pyclient/rs/models/repository_statistics.py +112 -0
- lockss/pyclient/rs/models/storage_info.py +287 -0
- lockss/pyclient/rs/models/streaming_response_body.py +84 -0
- lockss/pyclient/rs/rest.py +317 -0
- lockss_pyclient-0.1.0.dev1.dist-info/LICENSE +27 -0
- lockss_pyclient-0.1.0.dev1.dist-info/METADATA +29 -0
- lockss_pyclient-0.1.0.dev1.dist-info/RECORD +148 -0
- lockss_pyclient-0.1.0.dev1.dist-info/WHEEL +4 -0
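The manifest above shows one swagger-codegen-generated sub-client per LOCKSS service (config, crawler, md, poller, rs), each with its own configuration.py, api_client.py, API wrappers, and models. As a rough orientation only, a hypothetical sketch of wiring up the crawler sub-client is shown below; the class names, attributes, and service URL follow the usual swagger-codegen Python layout and are assumptions, not verified against this package.

# Hypothetical usage sketch; module paths come from the manifest above,
# class names (Configuration, ApiClient, CrawlsApi) and attributes are
# assumed from the standard swagger-codegen Python templates.
from lockss.pyclient.crawler.configuration import Configuration
from lockss.pyclient.crawler.api_client import ApiClient
from lockss.pyclient.crawler.api.crawls_api import CrawlsApi

config = Configuration()
config.host = "https://crawler.example.org:24660"  # placeholder base URL, not a documented default
config.username = "lockss-user"                    # placeholder credentials
config.password = "lockss-password"

client = ApiClient(configuration=config)  # shared REST client (see api_client.py)
crawls = CrawlsApi(api_client=client)     # endpoint wrappers (see api/crawls_api.py)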
lockss/pyclient/crawler/models/counter.py
@@ -0,0 +1,142 @@
# coding: utf-8

"""
    LOCKSS Crawler Service REST API

    REST API of the LOCKSS Crawler Service  # noqa: E501

    OpenAPI spec version: 2.0.0
    Contact: lockss-support@lockss.org
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six

class Counter(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'count': 'int',
        'items_link': 'str'
    }

    attribute_map = {
        'count': 'count',
        'items_link': 'itemsLink'
    }

    def __init__(self, count=None, items_link=None):  # noqa: E501
        """Counter - a model defined in Swagger"""  # noqa: E501
        self._count = None
        self._items_link = None
        self.discriminator = None
        self.count = count
        self.items_link = items_link

    @property
    def count(self):
        """Gets the count of this Counter.  # noqa: E501

        The number of elements  # noqa: E501

        :return: The count of this Counter.  # noqa: E501
        :rtype: int
        """
        return self._count

    @count.setter
    def count(self, count):
        """Sets the count of this Counter.

        The number of elements  # noqa: E501

        :param count: The count of this Counter.  # noqa: E501
        :type: int
        """
        if count is None:
            raise ValueError("Invalid value for `count`, must not be `None`")  # noqa: E501

        self._count = count

    @property
    def items_link(self):
        """Gets the items_link of this Counter.  # noqa: E501

        A link to the list of count items or to a pager with count\\ \\ items.  # noqa: E501

        :return: The items_link of this Counter.  # noqa: E501
        :rtype: str
        """
        return self._items_link

    @items_link.setter
    def items_link(self, items_link):
        """Sets the items_link of this Counter.

        A link to the list of count items or to a pager with count\\ \\ items.  # noqa: E501

        :param items_link: The items_link of this Counter.  # noqa: E501
        :type: str
        """
        if items_link is None:
            raise ValueError("Invalid value for `items_link`, must not be `None`")  # noqa: E501

        self._items_link = items_link

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(Counter, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Counter):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
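For orientation, a minimal sketch of how this model behaves, inferred only from the code above: both fields are mandatory, and to_dict() keys use the snake_case attribute names (the camelCase names in attribute_map are presumably applied by ApiClient during serialization).

# Illustrative only; the import path comes from the manifest above.
from lockss.pyclient.crawler.models.counter import Counter

counter = Counter(count=12, items_link="/crawls/abc/urls/fetched")  # hypothetical link value
print(counter.to_dict())  # {'count': 12, 'items_link': '/crawls/abc/urls/fetched'}

try:
    Counter(count=None, items_link="x")
except ValueError as err:
    print(err)  # both `count` and `items_link` reject None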
lockss/pyclient/crawler/models/crawl_desc.py
@@ -0,0 +1,344 @@
# coding: utf-8

"""
    LOCKSS Crawler Service REST API

    REST API of the LOCKSS Crawler Service  # noqa: E501

    OpenAPI spec version: 2.0.0
    Contact: lockss-support@lockss.org
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six

class CrawlDesc(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'au_id': 'str',
        'crawl_kind': 'str',
        'crawler_id': 'str',
        'force_crawl': 'bool',
        'refetch_depth': 'int',
        'priority': 'int',
        'crawl_list': 'list[str]',
        'crawl_depth': 'int',
        'extra_crawler_data': 'dict(str, object)'
    }

    attribute_map = {
        'au_id': 'auId',
        'crawl_kind': 'crawlKind',
        'crawler_id': 'crawlerId',
        'force_crawl': 'forceCrawl',
        'refetch_depth': 'refetchDepth',
        'priority': 'priority',
        'crawl_list': 'crawlList',
        'crawl_depth': 'crawlDepth',
        'extra_crawler_data': 'extraCrawlerData'
    }

    def __init__(self, au_id=None, crawl_kind=None, crawler_id='classic', force_crawl=False, refetch_depth=-1, priority=0, crawl_list=None, crawl_depth=None, extra_crawler_data=None):  # noqa: E501
        """CrawlDesc - a model defined in Swagger"""  # noqa: E501
        self._au_id = None
        self._crawl_kind = None
        self._crawler_id = None
        self._force_crawl = None
        self._refetch_depth = None
        self._priority = None
        self._crawl_list = None
        self._crawl_depth = None
        self._extra_crawler_data = None
        self.discriminator = None
        self.au_id = au_id
        self.crawl_kind = crawl_kind
        if crawler_id is not None:
            self.crawler_id = crawler_id
        if force_crawl is not None:
            self.force_crawl = force_crawl
        if refetch_depth is not None:
            self.refetch_depth = refetch_depth
        if priority is not None:
            self.priority = priority
        if crawl_list is not None:
            self.crawl_list = crawl_list
        if crawl_depth is not None:
            self.crawl_depth = crawl_depth
        if extra_crawler_data is not None:
            self.extra_crawler_data = extra_crawler_data

    @property
    def au_id(self):
        """Gets the au_id of this CrawlDesc.  # noqa: E501

        The identifier of the archival unit to be crawled.  # noqa: E501

        :return: The au_id of this CrawlDesc.  # noqa: E501
        :rtype: str
        """
        return self._au_id

    @au_id.setter
    def au_id(self, au_id):
        """Sets the au_id of this CrawlDesc.

        The identifier of the archival unit to be crawled.  # noqa: E501

        :param au_id: The au_id of this CrawlDesc.  # noqa: E501
        :type: str
        """
        if au_id is None:
            raise ValueError("Invalid value for `au_id`, must not be `None`")  # noqa: E501

        self._au_id = au_id

    @property
    def crawl_kind(self):
        """Gets the crawl_kind of this CrawlDesc.  # noqa: E501

        The kind of crawl being performed either 'newContent' or 'repair'.  # noqa: E501

        :return: The crawl_kind of this CrawlDesc.  # noqa: E501
        :rtype: str
        """
        return self._crawl_kind

    @crawl_kind.setter
    def crawl_kind(self, crawl_kind):
        """Sets the crawl_kind of this CrawlDesc.

        The kind of crawl being performed either 'newContent' or 'repair'.  # noqa: E501

        :param crawl_kind: The crawl_kind of this CrawlDesc.  # noqa: E501
        :type: str
        """
        if crawl_kind is None:
            raise ValueError("Invalid value for `crawl_kind`, must not be `None`")  # noqa: E501
        allowed_values = ["newContent", "repair"]  # noqa: E501
        if crawl_kind not in allowed_values:
            raise ValueError(
                "Invalid value for `crawl_kind` ({0}), must be one of {1}"  # noqa: E501
                .format(crawl_kind, allowed_values)
            )

        self._crawl_kind = crawl_kind

    @property
    def crawler_id(self):
        """Gets the crawler_id of this CrawlDesc.  # noqa: E501

        The crawler to be used for this crawl.  # noqa: E501

        :return: The crawler_id of this CrawlDesc.  # noqa: E501
        :rtype: str
        """
        return self._crawler_id

    @crawler_id.setter
    def crawler_id(self, crawler_id):
        """Sets the crawler_id of this CrawlDesc.

        The crawler to be used for this crawl.  # noqa: E501

        :param crawler_id: The crawler_id of this CrawlDesc.  # noqa: E501
        :type: str
        """

        self._crawler_id = crawler_id

    @property
    def force_crawl(self):
        """Gets the force_crawl of this CrawlDesc.  # noqa: E501

        An indication of whether the crawl is to be forced, suppressing conditions that might otherwise prevent the crawl from happening.  # noqa: E501

        :return: The force_crawl of this CrawlDesc.  # noqa: E501
        :rtype: bool
        """
        return self._force_crawl

    @force_crawl.setter
    def force_crawl(self, force_crawl):
        """Sets the force_crawl of this CrawlDesc.

        An indication of whether the crawl is to be forced, suppressing conditions that might otherwise prevent the crawl from happening.  # noqa: E501

        :param force_crawl: The force_crawl of this CrawlDesc.  # noqa: E501
        :type: bool
        """

        self._force_crawl = force_crawl

    @property
    def refetch_depth(self):
        """Gets the refetch_depth of this CrawlDesc.  # noqa: E501

        The refetch depth to use for a deep crawl.  # noqa: E501

        :return: The refetch_depth of this CrawlDesc.  # noqa: E501
        :rtype: int
        """
        return self._refetch_depth

    @refetch_depth.setter
    def refetch_depth(self, refetch_depth):
        """Sets the refetch_depth of this CrawlDesc.

        The refetch depth to use for a deep crawl.  # noqa: E501

        :param refetch_depth: The refetch_depth of this CrawlDesc.  # noqa: E501
        :type: int
        """

        self._refetch_depth = refetch_depth

    @property
    def priority(self):
        """Gets the priority of this CrawlDesc.  # noqa: E501

        The priority for the crawl.  # noqa: E501

        :return: The priority of this CrawlDesc.  # noqa: E501
        :rtype: int
        """
        return self._priority

    @priority.setter
    def priority(self, priority):
        """Sets the priority of this CrawlDesc.

        The priority for the crawl.  # noqa: E501

        :param priority: The priority of this CrawlDesc.  # noqa: E501
        :type: int
        """

        self._priority = priority

    @property
    def crawl_list(self):
        """Gets the crawl_list of this CrawlDesc.  # noqa: E501

        The list of URLs to crawl.  # noqa: E501

        :return: The crawl_list of this CrawlDesc.  # noqa: E501
        :rtype: list[str]
        """
        return self._crawl_list

    @crawl_list.setter
    def crawl_list(self, crawl_list):
        """Sets the crawl_list of this CrawlDesc.

        The list of URLs to crawl.  # noqa: E501

        :param crawl_list: The crawl_list of this CrawlDesc.  # noqa: E501
        :type: list[str]
        """

        self._crawl_list = crawl_list

    @property
    def crawl_depth(self):
        """Gets the crawl_depth of this CrawlDesc.  # noqa: E501

        The depth to which the links should be followed. 0 means do not follow links.  # noqa: E501

        :return: The crawl_depth of this CrawlDesc.  # noqa: E501
        :rtype: int
        """
        return self._crawl_depth

    @crawl_depth.setter
    def crawl_depth(self, crawl_depth):
        """Sets the crawl_depth of this CrawlDesc.

        The depth to which the links should be followed. 0 means do not follow links.  # noqa: E501

        :param crawl_depth: The crawl_depth of this CrawlDesc.  # noqa: E501
        :type: int
        """

        self._crawl_depth = crawl_depth

    @property
    def extra_crawler_data(self):
        """Gets the extra_crawler_data of this CrawlDesc.  # noqa: E501

        A map of additional properties for a crawl on a given crawler.  # noqa: E501

        :return: The extra_crawler_data of this CrawlDesc.  # noqa: E501
        :rtype: dict(str, object)
        """
        return self._extra_crawler_data

    @extra_crawler_data.setter
    def extra_crawler_data(self, extra_crawler_data):
        """Sets the extra_crawler_data of this CrawlDesc.

        A map of additional properties for a crawl on a given crawler.  # noqa: E501

        :param extra_crawler_data: The extra_crawler_data of this CrawlDesc.  # noqa: E501
        :type: dict(str, object)
        """

        self._extra_crawler_data = extra_crawler_data

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(CrawlDesc, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CrawlDesc):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
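To illustrate the defaults and the crawlKind validation visible in the model above, a minimal sketch inferred only from that code:

# Illustrative only; the import path comes from the manifest, the AUID is hypothetical.
from lockss.pyclient.crawler.models.crawl_desc import CrawlDesc

desc = CrawlDesc(
    au_id="org|lockss|plugin|ExamplePlugin&base_url~http%3A%2F%2Fexample%2Ecom%2F",  # hypothetical AUID
    crawl_kind="newContent",
)
# Unspecified fields take the constructor defaults:
#   crawler_id='classic', force_crawl=False, refetch_depth=-1, priority=0
print(desc.to_dict()["crawler_id"])  # 'classic'

try:
    CrawlDesc(au_id="another-auid", crawl_kind="refetch")  # not in ["newContent", "repair"]
except ValueError as err:
    print(err)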