pyzotero 1.7.6__py3-none-any.whl → 1.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyzotero/__init__.py +60 -0
- pyzotero/_client.py +1402 -0
- pyzotero/_decorators.py +195 -0
- pyzotero/_search.py +190 -0
- pyzotero/_upload.py +241 -0
- pyzotero/_utils.py +86 -0
- pyzotero/cli.py +789 -4
- pyzotero/errors.py +185 -0
- pyzotero/filetransport.py +2 -2
- pyzotero/semantic_scholar.py +441 -0
- pyzotero/zotero.py +62 -2035
- pyzotero/zotero_errors.py +53 -136
- {pyzotero-1.7.6.dist-info → pyzotero-1.9.0.dist-info}/METADATA +3 -3
- pyzotero-1.9.0.dist-info/RECORD +16 -0
- pyzotero-1.7.6.dist-info/RECORD +0 -9
- {pyzotero-1.7.6.dist-info → pyzotero-1.9.0.dist-info}/WHEEL +0 -0
- {pyzotero-1.7.6.dist-info → pyzotero-1.9.0.dist-info}/entry_points.txt +0 -0
pyzotero/_client.py
ADDED
|
@@ -0,0 +1,1402 @@
|
|
|
1
|
+
"""Zotero API client for Pyzotero.
|
|
2
|
+
|
|
3
|
+
This module contains the main Zotero class which provides methods for
|
|
4
|
+
interacting with the Zotero API.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import copy
|
|
10
|
+
import json
|
|
11
|
+
import re
|
|
12
|
+
import time
|
|
13
|
+
from pathlib import Path, PurePosixPath
|
|
14
|
+
from urllib.parse import (
|
|
15
|
+
parse_qs,
|
|
16
|
+
parse_qsl,
|
|
17
|
+
quote,
|
|
18
|
+
unquote,
|
|
19
|
+
urlencode,
|
|
20
|
+
urlparse,
|
|
21
|
+
urlunparse,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
import httpx
|
|
25
|
+
import whenever
|
|
26
|
+
from httpx import Request
|
|
27
|
+
|
|
28
|
+
import pyzotero as pz
|
|
29
|
+
|
|
30
|
+
from . import errors as ze
|
|
31
|
+
from ._decorators import backoff_check, cleanwrap, retrieve, ss_wrap, tcache
|
|
32
|
+
from ._upload import Zupload
|
|
33
|
+
from ._utils import (
|
|
34
|
+
DEFAULT_ITEM_LIMIT,
|
|
35
|
+
DEFAULT_NUM_ITEMS,
|
|
36
|
+
DEFAULT_TIMEOUT,
|
|
37
|
+
ONE_HOUR,
|
|
38
|
+
build_url,
|
|
39
|
+
chunks,
|
|
40
|
+
get_backoff_duration,
|
|
41
|
+
merge_params,
|
|
42
|
+
token,
|
|
43
|
+
)
|
|
44
|
+
from .errors import error_handler
|
|
45
|
+
from .filetransport import Client as File_Client
|
|
46
|
+
|
|
47
|
+
__author__ = "Stephan Hügel"
|
|
48
|
+
__api_version__ = "3"
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class Zotero:
|
|
52
|
+
"""Zotero API methods.
|
|
53
|
+
|
|
54
|
+
A full list of methods can be found here:
|
|
55
|
+
http://www.zotero.org/support/dev/server_api
|
|
56
|
+
"""
|
|
57
|
+
|
|
58
|
+
    def __init__(
        self,
        library_id=None,
        library_type=None,
        api_key=None,
        preserve_json_order=False,
        locale="en-US",
        local=False,
        client=None,
    ):
        """Create an API client for a single user or group library.

        library_id and library_type are both required. api_key is only
        needed for non-public libraries. local=True targets a locally
        running Zotero instance rather than the web API; a pre-configured
        httpx client may be injected via `client`.
        """
        # set early so __del__ can run safely even if we raise below
        self.client = None
        """Store Zotero credentials"""
        # choose the API endpoint: web API or a local Zotero instance
        if not local:
            self.endpoint = "https://api.zotero.org"
            self.local = False
        else:
            self.endpoint = "http://localhost:23119/api"
            self.local = True
        if library_id is not None and library_type:
            self.library_id = library_id
            # library_type determines whether query begins w. /users or /groups
            self.library_type = library_type + "s"
        else:
            err = "Please provide both the library ID and the library type"
            raise ze.MissingCredentialsError(err)
        # api_key is not required for public individual or group libraries
        self.api_key = api_key
        if preserve_json_order:
            import warnings  # noqa: PLC0415

            warnings.warn(
                "preserve_json_order is deprecated and will be removed in a future version. "
                "Python 3.7+ dicts preserve insertion order automatically.",
                DeprecationWarning,
                stacklevel=2,
            )
        self.locale = locale
        # per-call URL parameters; reset between calls
        self.url_params = None
        self.tag_data = False
        # holds the most recent httpx response
        self.request = None
        self.snapshot = False
        self.client = client or httpx.Client(
            headers=self.default_headers(),
            follow_redirects=True,
        )
        # these aren't valid item fields, so never send them to the server
        self.temp_keys = {"key", "etag", "group_id", "updated"}
        # determine which processor to use for the parsed content
        self.fmt = re.compile(r"(?<=format=)\w+")
        self.content = re.compile(r"(?<=content=)\w+")
        # JSON by default
        # maps response Content-Type -> short format name
        self.formats = {
            "application/atom+xml": "atom",
            "application/x-bibtex": "bibtex",
            "application/json": "json",
            "text/html": "snapshot",
            "text/plain": "plain",
            "text/markdown": "plain",
            "application/pdf; charset=utf-8": "pdf",
            "application/pdf": "pdf",
            "application/msword": "doc",
            "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": "xlsx",
            "application/vnd.openxmlformats-officedocument.wordprocessingml.document": "docx",
            "application/vnd.openxmlformats-officedocument.presentationml.presentation": "pptx",
            "application/zip": "zip",
            "application/epub+zip": "zip",
            "audio/mpeg": "mp3",
            "video/mp4": "mp4",
            "audio/x-wav": "wav",
            "video/x-msvideo": "avi",
            "application/octet-stream": "octet",
            "application/x-tex": "tex",
            "application/x-texinfo": "texinfo",
            "image/jpeg": "jpeg",
            "image/png": "png",
            "image/gif": "gif",
            "image/tiff": "tiff",
            "application/postscript": "postscript",
            "application/rtf": "rtf",
        }
        # maps requested content format -> processing callable
        self.processors = {
            "bib": self._bib_processor,
            "citation": self._citation_processor,
            "bibtex": self._bib_processor,
            "bookmarks": self._bib_processor,
            "coins": self._bib_processor,
            "csljson": self._csljson_processor,
            "mods": self._bib_processor,
            "refer": self._bib_processor,
            "rdf_bibliontology": self._bib_processor,
            "rdf_dc": self._bib_processor,
            "rdf_zotero": self._bib_processor,
            "ris": self._bib_processor,
            "tei": self._bib_processor,
            "wikipedia": self._bib_processor,
            "json": self._json_processor,
            "html": self._bib_processor,
        }
        # pagination links extracted from the last response
        self.links = None
        self.self_link = {}
        # cache of item/attachment templates, keyed by template name
        self.templates = {}
        self.savedsearch = None
        # backoff handling: timestamp when backoff expires (0.0 = no backoff)
        self.backoff_until = 0.0
|
163
|
+
def __del__(self):
|
|
164
|
+
"""Remove client before cleanup."""
|
|
165
|
+
# this isn't guaranteed to run, but that's OK
|
|
166
|
+
if c := self.client:
|
|
167
|
+
c.close()
|
|
168
|
+
|
|
169
|
+
    @property
    def __version__(self):
        """Return the version of the pyzotero library."""
        # delegate to the package attribute so there is one source of truth
        return pz.__version__
|
174
|
+
def _check_for_component(self, url, component):
|
|
175
|
+
"""Check a url path query fragment for a specific query parameter."""
|
|
176
|
+
return bool(parse_qs(url).get(component))
|
|
177
|
+
|
|
178
|
+
def _striplocal(self, url):
|
|
179
|
+
"""Remove the leading '/api' substring from urls if running in local mode."""
|
|
180
|
+
if self.local:
|
|
181
|
+
parsed = urlparse(url)
|
|
182
|
+
purepath = PurePosixPath(unquote(parsed.path))
|
|
183
|
+
newpath = "/".join(purepath.parts[2:])
|
|
184
|
+
replaced = parsed._replace(path="/" + newpath)
|
|
185
|
+
return urlunparse(replaced)
|
|
186
|
+
return url
|
|
187
|
+
|
|
188
|
+
def _set_backoff(self, duration):
|
|
189
|
+
"""Set backoff expiration time."""
|
|
190
|
+
self.backoff_until = time.time() + float(duration)
|
|
191
|
+
|
|
192
|
+
def _check_backoff(self):
|
|
193
|
+
"""Wait if backoff is active."""
|
|
194
|
+
remainder = self.backoff_until - time.time()
|
|
195
|
+
if remainder > 0.0:
|
|
196
|
+
time.sleep(remainder)
|
|
197
|
+
|
|
198
|
+
def default_headers(self):
|
|
199
|
+
"""Return headers that are always OK to include."""
|
|
200
|
+
_headers = {
|
|
201
|
+
"User-Agent": f"Pyzotero/{pz.__version__}",
|
|
202
|
+
"Zotero-API-Version": f"{__api_version__}",
|
|
203
|
+
}
|
|
204
|
+
if self.api_key:
|
|
205
|
+
_headers["Authorization"] = f"Bearer {self.api_key}"
|
|
206
|
+
return _headers
|
|
207
|
+
|
|
208
|
+
def _cache(self, response, key):
|
|
209
|
+
"""Add a retrieved template to the cache for 304 checking.
|
|
210
|
+
|
|
211
|
+
Accepts a dict and key name, adds the retrieval time, and adds both
|
|
212
|
+
to self.templates as a new dict using the specified key.
|
|
213
|
+
"""
|
|
214
|
+
# cache template and retrieval time for subsequent calls
|
|
215
|
+
try:
|
|
216
|
+
thetime = whenever.ZonedDateTime.now("Europe/London").py_datetime()
|
|
217
|
+
except AttributeError:
|
|
218
|
+
thetime = whenever.ZonedDateTime.now("Europe/London").py_datetime()
|
|
219
|
+
self.templates[key] = {"tmplt": response.json(), "updated": thetime}
|
|
220
|
+
return copy.deepcopy(response.json())
|
|
221
|
+
|
|
222
|
+
@cleanwrap
|
|
223
|
+
def _cleanup(self, to_clean, allow=()):
|
|
224
|
+
"""Remove keys we added for internal use."""
|
|
225
|
+
# this item's been retrieved from the API, we only need the 'data' entry
|
|
226
|
+
if to_clean.keys() == ["links", "library", "version", "meta", "key", "data"]:
|
|
227
|
+
to_clean = to_clean["data"]
|
|
228
|
+
return {
|
|
229
|
+
k: v for k, v in to_clean.items() if k in allow or k not in self.temp_keys
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
    def _retrieve_data(self, request: str | None = None, params=None):
        """Retrieve Zotero items via the API.

        Combine endpoint and request to access the specific resource.
        Returns the httpx response object (also stored on self.request).
        """
        if request is None:
            request = ""
        full_url = build_url(self.endpoint, request)
        # ensure that we wait if there's an active backoff
        self._check_backoff()
        # don't set locale if the url already contains it
        # we always add a locale if it's a "standalone" or first call
        needs_locale = not self.links or not self._check_for_component(
            self.links.get("next"),
            "locale",
        )
        if needs_locale:
            if params:
                params["locale"] = self.locale
            else:
                params = {"locale": self.locale}
        # we now have to merge self.url_params (default params, and those supplied by the user)
        if not params:
            params = {}
        if not self.url_params:
            self.url_params = {}
        # explicit per-call params take precedence over stored defaults
        merged_params = {**self.url_params, **params}
        # our incoming url might be from the "links" dict, in which case it will contain url parameters.
        # Unfortunately, httpx doesn't like to merge query parameters in the url string and passed params
        # so we strip the url params, combining them with our existing url_params
        final_url, final_params = merge_params(full_url, merged_params)
        # file URI errors are raised immediately so we have to try here
        try:
            self.request = self.client.get(
                url=final_url,
                params=final_params,
                headers=self.default_headers(),
                timeout=DEFAULT_TIMEOUT,
            )
            self.request.encoding = "utf-8"
            # The API doesn't return this any more, so we have to cheat
            self.self_link = self.request.url
        except httpx.UnsupportedProtocol:
            # File URI handler logic: fall back to the local file transport
            fc = File_Client()
            response = fc.get(
                url=final_url,
                params=final_params,
                headers=self.default_headers(),
                timeout=DEFAULT_TIMEOUT,
                follow_redirects=True,
            )
            self.request = response
            # since we'll be writing bytes, we need to set this to a type that will trigger the bytes processor
            self.request.headers["Content-Type"] = "text/plain"
        try:
            self.request.raise_for_status()
        except httpx.HTTPError as exc:
            # delegates to the shared error translation/raise logic
            error_handler(self, self.request, exc)
        # honour any server-requested backoff for subsequent calls
        backoff = get_backoff_duration(self.request.headers)
        if backoff:
            self._set_backoff(backoff)
        return self.request
|
297
|
+
    def _extract_links(self):
        """Extract self, first, next, last links from a request response.

        Returns a dict of endpoint-relative URL fragments, or None when the
        response carries no Link headers (i.e. a single-item response).
        """
        extracted = {}
        try:
            for key, value in self.request.links.items():
                parsed = urlparse(value["url"])
                # keep only path + query so the fragment is endpoint-relative
                fragment = urlunparse(("", "", parsed.path, "", parsed.query, ""))
                extracted[key] = fragment
            # add a 'self' link
            parsed = urlparse(str(self.self_link))
            # strip 'format' query parameter and rebuild query string
            query_params = [(k, v) for k, v in parse_qsl(parsed.query) if k != "format"]
            # rebuild url fragment with just path and query (consistent with other links)
            extracted["self"] = urlunparse(
                ("", "", parsed.path, "", urlencode(query_params), "")
            )
        except KeyError:
            # No links present, because it's a single item
            return None
        else:
            return extracted
|
319
|
+
def _updated(self, url, payload, template=None):
|
|
320
|
+
"""Call to see if a template request returns 304.
|
|
321
|
+
|
|
322
|
+
Accepts:
|
|
323
|
+
- a string to combine with the API endpoint
|
|
324
|
+
- a dict of format values, in case they're required by 'url'
|
|
325
|
+
- a template name to check for
|
|
326
|
+
|
|
327
|
+
As per the API docs, a template less than 1 hour old is
|
|
328
|
+
assumed to be fresh, and will immediately return False if found.
|
|
329
|
+
"""
|
|
330
|
+
# If the template is more than an hour old, try a 304
|
|
331
|
+
if (
|
|
332
|
+
abs(
|
|
333
|
+
whenever.ZonedDateTime.now("Europe/London").py_datetime()
|
|
334
|
+
- self.templates[template]["updated"],
|
|
335
|
+
).seconds
|
|
336
|
+
> ONE_HOUR
|
|
337
|
+
):
|
|
338
|
+
query = build_url(
|
|
339
|
+
self.endpoint,
|
|
340
|
+
url.format(u=self.library_id, t=self.library_type, **payload),
|
|
341
|
+
)
|
|
342
|
+
headers = {
|
|
343
|
+
"If-Modified-Since": payload["updated"].strftime(
|
|
344
|
+
"%a, %d %b %Y %H:%M:%S %Z",
|
|
345
|
+
),
|
|
346
|
+
}
|
|
347
|
+
# perform the request, and check whether the response returns 304
|
|
348
|
+
self._check_backoff()
|
|
349
|
+
req = self.client.get(query, headers=headers)
|
|
350
|
+
try:
|
|
351
|
+
req.raise_for_status()
|
|
352
|
+
except httpx.HTTPError as exc:
|
|
353
|
+
error_handler(self, req, exc)
|
|
354
|
+
backoff = get_backoff_duration(self.request.headers)
|
|
355
|
+
if backoff:
|
|
356
|
+
self._set_backoff(backoff)
|
|
357
|
+
return req.status_code == httpx.codes.NOT_MODIFIED
|
|
358
|
+
# Still plenty of life left in't
|
|
359
|
+
return False
|
|
360
|
+
|
|
361
|
+
    def add_parameters(self, **params):
        """Add URL parameters.

        Also ensure that only valid format/content combinations are requested.
        Note the 'limit' sentinels: 0/absent -> DEFAULT_ITEM_LIMIT,
        -1/None -> no limit parameter at all.
        """
        # Preserve constructor-level parameters (like locale) while allowing method-level overrides
        if self.url_params is None:
            self.url_params = {}

        # Store existing params to preserve things like locale
        preserved_params = self.url_params.copy()

        # we want JSON by default
        if not params.get("format"):
            params["format"] = "json"
        # non-standard content must be retrieved as Atom
        if params.get("content"):
            params["format"] = "atom"
        # TODO: rewrite format=atom, content=json request
        if "limit" not in params or params.get("limit") == 0:
            params["limit"] = DEFAULT_ITEM_LIMIT
        # Need ability to request arbitrary number of results for version
        # response
        # -1 value is hack that works with current version
        elif params["limit"] == -1 or params["limit"] is None:
            del params["limit"]
        # bib format can't have a limit
        if params.get("format") == "bib":
            params.pop("limit", None)

        # Merge preserved params with new params (new params override existing ones)
        self.url_params = {**preserved_params, **params}
|
394
|
+
def _build_query(self, query_string, no_params=False):
|
|
395
|
+
"""Set request parameters.
|
|
396
|
+
|
|
397
|
+
Will always add the user ID if it hasn't been specifically set by an API method.
|
|
398
|
+
"""
|
|
399
|
+
try:
|
|
400
|
+
query = quote(query_string.format(u=self.library_id, t=self.library_type))
|
|
401
|
+
except KeyError as err:
|
|
402
|
+
errmsg = f"There's a request parameter missing: {err}"
|
|
403
|
+
raise ze.ParamNotPassedError(errmsg) from None
|
|
404
|
+
# Add the URL parameters and the user key, if necessary
|
|
405
|
+
if no_params is False and not self.url_params:
|
|
406
|
+
self.add_parameters()
|
|
407
|
+
return query
|
|
408
|
+
|
|
409
|
+
@retrieve
|
|
410
|
+
def publications(self):
|
|
411
|
+
"""Return the contents of My Publications."""
|
|
412
|
+
if self.library_type != "users":
|
|
413
|
+
msg = "This API call does not exist for group libraries"
|
|
414
|
+
raise ze.CallDoesNotExistError(msg)
|
|
415
|
+
query_string = "/{t}/{u}/publications/items"
|
|
416
|
+
return self._build_query(query_string)
|
|
417
|
+
|
|
418
|
+
# The following methods are Zotero Read API calls
|
|
419
|
+
def num_items(self):
|
|
420
|
+
"""Return the total number of top-level items in the library."""
|
|
421
|
+
query = "/{t}/{u}/items/top"
|
|
422
|
+
return self._totals(query)
|
|
423
|
+
|
|
424
|
+
def count_items(self):
|
|
425
|
+
"""Return the count of all items in a group / library."""
|
|
426
|
+
query = "/{t}/{u}/items"
|
|
427
|
+
return self._totals(query)
|
|
428
|
+
|
|
429
|
+
def num_collectionitems(self, collection):
|
|
430
|
+
"""Return the total number of items in the specified collection."""
|
|
431
|
+
query = f"/{self.library_type}/{self.library_id}/collections/{collection.upper()}/items"
|
|
432
|
+
return self._totals(query)
|
|
433
|
+
|
|
434
|
+
def _totals(self, query):
|
|
435
|
+
"""General method for returning total counts."""
|
|
436
|
+
self.add_parameters(limit=1)
|
|
437
|
+
query = self._build_query(query)
|
|
438
|
+
self._retrieve_data(query)
|
|
439
|
+
self.url_params = None
|
|
440
|
+
# extract the 'total items' figure
|
|
441
|
+
return int(self.request.headers["Total-Results"])
|
|
442
|
+
|
|
443
|
+
@retrieve
|
|
444
|
+
def key_info(self, **kwargs):
|
|
445
|
+
"""Retrieve info about the permissions associated with the key."""
|
|
446
|
+
query_string = f"/keys/{self.api_key}"
|
|
447
|
+
return self._build_query(query_string)
|
|
448
|
+
|
|
449
|
+
@retrieve
|
|
450
|
+
def items(self, **kwargs):
|
|
451
|
+
"""Get user items."""
|
|
452
|
+
query_string = "/{t}/{u}/items"
|
|
453
|
+
return self._build_query(query_string)
|
|
454
|
+
|
|
455
|
+
@retrieve
|
|
456
|
+
def settings(self, **kwargs):
|
|
457
|
+
"""Get synced user settings."""
|
|
458
|
+
query_string = "/{t}/{u}/settings"
|
|
459
|
+
return self._build_query(query_string)
|
|
460
|
+
|
|
461
|
+
@retrieve
|
|
462
|
+
def fulltext_item(self, itemkey, **kwargs):
|
|
463
|
+
"""Get full-text content for an item."""
|
|
464
|
+
query_string = (
|
|
465
|
+
f"/{self.library_type}/{self.library_id}/items/{itemkey}/fulltext"
|
|
466
|
+
)
|
|
467
|
+
return self._build_query(query_string)
|
|
468
|
+
|
|
469
|
+
@backoff_check
|
|
470
|
+
def set_fulltext(self, itemkey, payload):
|
|
471
|
+
"""Set full-text data for an item.
|
|
472
|
+
|
|
473
|
+
<itemkey> should correspond to an existing attachment item.
|
|
474
|
+
payload should be a dict containing three keys:
|
|
475
|
+
'content': the full-text content and either
|
|
476
|
+
For text documents, 'indexedChars' and 'totalChars' OR
|
|
477
|
+
For PDFs, 'indexedPages' and 'totalPages'.
|
|
478
|
+
"""
|
|
479
|
+
headers = {"Content-Type": "application/json"}
|
|
480
|
+
return self.client.put(
|
|
481
|
+
url=build_url(
|
|
482
|
+
self.endpoint,
|
|
483
|
+
f"/{self.library_type}/{self.library_id}/items/{itemkey}/fulltext",
|
|
484
|
+
),
|
|
485
|
+
headers=headers,
|
|
486
|
+
json=payload,
|
|
487
|
+
)
|
|
488
|
+
|
|
489
|
+
def new_fulltext(self, since):
|
|
490
|
+
"""Retrieve list of full-text content items and versions newer than <since>."""
|
|
491
|
+
query_string = f"/{self.library_type}/{self.library_id}/fulltext"
|
|
492
|
+
headers = {}
|
|
493
|
+
params = {"since": since}
|
|
494
|
+
self._check_backoff()
|
|
495
|
+
resp = self.client.get(
|
|
496
|
+
build_url(self.endpoint, query_string),
|
|
497
|
+
params=params,
|
|
498
|
+
headers=headers,
|
|
499
|
+
)
|
|
500
|
+
try:
|
|
501
|
+
resp.raise_for_status()
|
|
502
|
+
except httpx.HTTPError as exc:
|
|
503
|
+
error_handler(self, resp, exc)
|
|
504
|
+
backoff = get_backoff_duration(self.request.headers)
|
|
505
|
+
if backoff:
|
|
506
|
+
self._set_backoff(backoff)
|
|
507
|
+
return resp.json()
|
|
508
|
+
|
|
509
|
+
def item_versions(self, **kwargs):
|
|
510
|
+
"""Return dict associating item keys to versions.
|
|
511
|
+
|
|
512
|
+
Accepts a since= parameter in kwargs to limit the data to those updated since.
|
|
513
|
+
"""
|
|
514
|
+
if "limit" not in kwargs:
|
|
515
|
+
kwargs["limit"] = None
|
|
516
|
+
kwargs["format"] = "versions"
|
|
517
|
+
return self.items(**kwargs)
|
|
518
|
+
|
|
519
|
+
def collection_versions(self, **kwargs):
|
|
520
|
+
"""Return dict associating collection keys to versions.
|
|
521
|
+
|
|
522
|
+
Accepts a since= parameter in kwargs to limit the data to those updated since.
|
|
523
|
+
"""
|
|
524
|
+
if "limit" not in kwargs:
|
|
525
|
+
kwargs["limit"] = None
|
|
526
|
+
kwargs["format"] = "versions"
|
|
527
|
+
return self.collections(**kwargs)
|
|
528
|
+
|
|
529
|
+
def last_modified_version(self, **kwargs):
|
|
530
|
+
"""Get the last modified user or group library version."""
|
|
531
|
+
# This MUST be a multiple-object request, limit param notwithstanding
|
|
532
|
+
self.items(limit=1)
|
|
533
|
+
lmv = self.request.headers.get("last-modified-version", 0)
|
|
534
|
+
return int(lmv)
|
|
535
|
+
|
|
536
|
+
@retrieve
|
|
537
|
+
def top(self, **kwargs):
|
|
538
|
+
"""Get user top-level items."""
|
|
539
|
+
query_string = "/{t}/{u}/items/top"
|
|
540
|
+
return self._build_query(query_string)
|
|
541
|
+
|
|
542
|
+
@retrieve
|
|
543
|
+
def trash(self, **kwargs):
|
|
544
|
+
"""Get all items in the trash."""
|
|
545
|
+
query_string = "/{t}/{u}/items/trash"
|
|
546
|
+
return self._build_query(query_string)
|
|
547
|
+
|
|
548
|
+
@retrieve
|
|
549
|
+
def searches(self, **kwargs):
|
|
550
|
+
"""Get saved searches."""
|
|
551
|
+
query_string = "/{t}/{u}/searches"
|
|
552
|
+
return self._build_query(query_string)
|
|
553
|
+
|
|
554
|
+
@retrieve
|
|
555
|
+
def deleted(self, **kwargs):
|
|
556
|
+
"""Get all deleted items (requires since= parameter)."""
|
|
557
|
+
if "limit" not in kwargs:
|
|
558
|
+
# Currently deleted API doesn't respect limit leaving it out by
|
|
559
|
+
# default preserves compat
|
|
560
|
+
kwargs["limit"] = None
|
|
561
|
+
query_string = "/{t}/{u}/deleted"
|
|
562
|
+
return self._build_query(query_string)
|
|
563
|
+
|
|
564
|
+
@retrieve
|
|
565
|
+
def item(self, item, **kwargs):
|
|
566
|
+
"""Get a specific item."""
|
|
567
|
+
query_string = f"/{self.library_type}/{self.library_id}/items/{item.upper()}"
|
|
568
|
+
return self._build_query(query_string)
|
|
569
|
+
|
|
570
|
+
@retrieve
|
|
571
|
+
def file(self, item, **kwargs):
|
|
572
|
+
"""Get the file from a specific item."""
|
|
573
|
+
query_string = (
|
|
574
|
+
f"/{self.library_type}/{self.library_id}/items/{item.upper()}/file"
|
|
575
|
+
)
|
|
576
|
+
return self._build_query(query_string, no_params=True)
|
|
577
|
+
|
|
578
|
+
def dump(self, itemkey, filename=None, path=None):
|
|
579
|
+
"""Dump a file attachment to disk, with optional filename and path."""
|
|
580
|
+
if not filename:
|
|
581
|
+
filename = self.item(itemkey)["data"]["filename"]
|
|
582
|
+
pth = Path(path) / filename if path else Path(filename)
|
|
583
|
+
file = self.file(itemkey)
|
|
584
|
+
if self.snapshot:
|
|
585
|
+
self.snapshot = False
|
|
586
|
+
pth = pth.parent / (pth.name + ".zip")
|
|
587
|
+
with pth.open("wb") as f:
|
|
588
|
+
f.write(file)
|
|
589
|
+
|
|
590
|
+
@retrieve
|
|
591
|
+
def children(self, item, **kwargs):
|
|
592
|
+
"""Get a specific item's child items."""
|
|
593
|
+
query_string = (
|
|
594
|
+
f"/{self.library_type}/{self.library_id}/items/{item.upper()}/children"
|
|
595
|
+
)
|
|
596
|
+
return self._build_query(query_string)
|
|
597
|
+
|
|
598
|
+
@retrieve
|
|
599
|
+
def collection_items(self, collection, **kwargs):
|
|
600
|
+
"""Get a specific collection's items."""
|
|
601
|
+
query_string = f"/{self.library_type}/{self.library_id}/collections/{collection.upper()}/items"
|
|
602
|
+
return self._build_query(query_string)
|
|
603
|
+
|
|
604
|
+
@retrieve
|
|
605
|
+
def collection_items_top(self, collection, **kwargs):
|
|
606
|
+
"""Get a specific collection's top-level items."""
|
|
607
|
+
query_string = f"/{self.library_type}/{self.library_id}/collections/{collection.upper()}/items/top"
|
|
608
|
+
return self._build_query(query_string)
|
|
609
|
+
|
|
610
|
+
@retrieve
|
|
611
|
+
def collection_tags(self, collection, **kwargs):
|
|
612
|
+
"""Get a specific collection's tags."""
|
|
613
|
+
query_string = f"/{self.library_type}/{self.library_id}/collections/{collection.upper()}/tags"
|
|
614
|
+
return self._build_query(query_string)
|
|
615
|
+
|
|
616
|
+
@retrieve
|
|
617
|
+
def collection(self, collection, **kwargs):
|
|
618
|
+
"""Get user collection."""
|
|
619
|
+
query_string = (
|
|
620
|
+
f"/{self.library_type}/{self.library_id}/collections/{collection.upper()}"
|
|
621
|
+
)
|
|
622
|
+
return self._build_query(query_string)
|
|
623
|
+
|
|
624
|
+
@retrieve
|
|
625
|
+
def collections(self, **kwargs):
|
|
626
|
+
"""Get user collections."""
|
|
627
|
+
query_string = "/{t}/{u}/collections"
|
|
628
|
+
return self._build_query(query_string)
|
|
629
|
+
|
|
630
|
+
def all_collections(self, collid=None):
|
|
631
|
+
"""Retrieve all collections and subcollections.
|
|
632
|
+
|
|
633
|
+
Works for top-level collections or for a specific collection.
|
|
634
|
+
Works at all collection depths.
|
|
635
|
+
"""
|
|
636
|
+
all_collections = []
|
|
637
|
+
|
|
638
|
+
def subcoll(clct):
|
|
639
|
+
"""Recursively add collections to a flat master list."""
|
|
640
|
+
all_collections.append(clct)
|
|
641
|
+
if clct["meta"].get("numCollections", 0) > 0:
|
|
642
|
+
# add collection to master list & recur with all child collections
|
|
643
|
+
for c in self.everything(self.collections_sub(clct["data"]["key"])):
|
|
644
|
+
subcoll(c)
|
|
645
|
+
|
|
646
|
+
# select all top-level collections or a specific collection and children
|
|
647
|
+
if collid:
|
|
648
|
+
toplevel = [self.collection(collid)]
|
|
649
|
+
else:
|
|
650
|
+
toplevel = self.everything(self.collections_top())
|
|
651
|
+
for collection in toplevel:
|
|
652
|
+
subcoll(collection)
|
|
653
|
+
return all_collections
|
|
654
|
+
|
|
655
|
+
@retrieve
|
|
656
|
+
def collections_top(self, **kwargs):
|
|
657
|
+
"""Get top-level user collections."""
|
|
658
|
+
query_string = "/{t}/{u}/collections/top"
|
|
659
|
+
return self._build_query(query_string)
|
|
660
|
+
|
|
661
|
+
@retrieve
|
|
662
|
+
def collections_sub(self, collection, **kwargs):
|
|
663
|
+
"""Get subcollections for a specific collection."""
|
|
664
|
+
query_string = f"/{self.library_type}/{self.library_id}/collections/{collection.upper()}/collections"
|
|
665
|
+
return self._build_query(query_string)
|
|
666
|
+
|
|
667
|
+
@retrieve
|
|
668
|
+
def groups(self, **kwargs):
|
|
669
|
+
"""Get user groups."""
|
|
670
|
+
query_string = "/users/{u}/groups"
|
|
671
|
+
return self._build_query(query_string)
|
|
672
|
+
|
|
673
|
+
@retrieve
|
|
674
|
+
def tags(self, **kwargs):
|
|
675
|
+
"""Get tags."""
|
|
676
|
+
query_string = "/{t}/{u}/tags"
|
|
677
|
+
self.tag_data = True
|
|
678
|
+
return self._build_query(query_string)
|
|
679
|
+
|
|
680
|
+
@retrieve
|
|
681
|
+
def item_tags(self, item, **kwargs):
|
|
682
|
+
"""Get tags for a specific item."""
|
|
683
|
+
query_string = (
|
|
684
|
+
f"/{self.library_type}/{self.library_id}/items/{item.upper()}/tags"
|
|
685
|
+
)
|
|
686
|
+
self.tag_data = True
|
|
687
|
+
return self._build_query(query_string)
|
|
688
|
+
|
|
689
|
+
    def all_top(self, **kwargs):
        """Retrieve all top-level items, following pagination to exhaustion."""
        return self.everything(self.top(**kwargs))
|
693
|
+
@retrieve
|
|
694
|
+
def follow(self):
|
|
695
|
+
"""Return the result of the call to the URL in the 'Next' link."""
|
|
696
|
+
if n := self.links.get("next"):
|
|
697
|
+
return self._striplocal(n)
|
|
698
|
+
return None
|
|
699
|
+
|
|
700
|
+
def iterfollow(self):
|
|
701
|
+
"""Return generator for self.follow()."""
|
|
702
|
+
# use same criterion as self.follow()
|
|
703
|
+
while True:
|
|
704
|
+
if self.links.get("next"):
|
|
705
|
+
yield self.follow()
|
|
706
|
+
else:
|
|
707
|
+
return
|
|
708
|
+
|
|
709
|
+
    def makeiter(self, func):
        """Return a generator of func's results.

        Requires that `func` (a previous API call) has populated
        self.links with pagination data; raises RuntimeError otherwise.
        """
        if self.links is None or "self" not in self.links:
            msg = "makeiter() requires a previous API call with pagination links"
            raise RuntimeError(msg)
        # reset the link. This results in an extra API call, yes
        self.links["next"] = self.links["self"]
        return self.iterfollow()
|
718
|
+
    def everything(self, query):
        """Retrieve all items in the library for a particular query.

        This method will override the 'limit' parameter if it's been set.
        """
        try:
            items = []
            items.extend(query)
            # keep following the pagination 'next' link until exhausted
            while self.links.get("next"):
                items.extend(self.follow())
        except TypeError:
            # we have a bibliography object ughh — it isn't list-extendable,
            # so accumulate via its .entries attribute instead
            items = copy.deepcopy(query)
            while self.links.get("next"):
                items.entries.extend(self.follow().entries)
        return items
|
735
|
+
    def get_subset(self, subset):
        """Retrieve a subset of items.

        Accepts a single argument: a list of item IDs.
        """
        if len(subset) > DEFAULT_NUM_ITEMS:
            err = f"You may only retrieve {DEFAULT_NUM_ITEMS} items per call"
            raise ze.TooManyItemsError(err)
        # remember any url parameters that have been set
        params = self.url_params
        retr = []
        for itm in subset:
            retr.append(self.item(itm))
            # re-apply the remembered params for the next call —
            # presumably because each retrieval resets self.url_params;
            # TODO(review): confirm against the @retrieve decorator
            self.url_params = params
        # clean up URL params when we're finished
        self.url_params = None
        return retr
|
753
|
+
# The following methods process data returned by Read API calls
|
|
754
|
+
def _json_processor(self, retrieved):
|
|
755
|
+
"""Format and return data from API calls which return Items."""
|
|
756
|
+
# send entries to _tags_data if there's no JSON
|
|
757
|
+
try:
|
|
758
|
+
items = [json.loads(e["content"][0]["value"]) for e in retrieved.entries]
|
|
759
|
+
except KeyError:
|
|
760
|
+
return self._tags_data(retrieved)
|
|
761
|
+
return items
|
|
762
|
+
|
|
763
|
+
def _csljson_processor(self, retrieved):
|
|
764
|
+
"""Return a list of dicts which are dumped CSL JSON."""
|
|
765
|
+
items = [
|
|
766
|
+
json.loads(entry["content"][0]["value"]) for entry in retrieved.entries
|
|
767
|
+
]
|
|
768
|
+
self.url_params = None
|
|
769
|
+
return items
|
|
770
|
+
|
|
771
|
+
def _bib_processor(self, retrieved):
|
|
772
|
+
"""Return a list of strings formatted as HTML bibliography entries."""
|
|
773
|
+
items = [bib["content"][0]["value"] for bib in retrieved.entries]
|
|
774
|
+
self.url_params = None
|
|
775
|
+
return items
|
|
776
|
+
|
|
777
|
+
def _citation_processor(self, retrieved):
|
|
778
|
+
"""Return a list of strings formatted as HTML citation entries."""
|
|
779
|
+
items = [cit["content"][0]["value"] for cit in retrieved.entries]
|
|
780
|
+
self.url_params = None
|
|
781
|
+
return items
|
|
782
|
+
|
|
783
|
+
def _tags_data(self, retrieved):
|
|
784
|
+
"""Format and return data from API calls which return Tags."""
|
|
785
|
+
self.url_params = None
|
|
786
|
+
return [t["tag"] for t in retrieved]
|
|
787
|
+
|
|
788
|
+
# The following methods are Write API calls
|
|
789
|
+
def item_template(self, itemtype, linkmode=None):
    """Get a template for a new item.

    Args:
        itemtype: Zotero item type, e.g. "book" or "attachment".
        linkmode: attachment link mode (only applied when itemtype is
            "attachment"), e.g. "imported_file".

    Returns:
        A dict containing the (possibly cached) item template.
    """
    # if we have a template and it hasn't been updated since we stored it
    # The cache key includes the link mode so attachment variants don't collide
    template_name = f"item_template_{itemtype}_{linkmode or ''}"
    params = {"itemType": itemtype}
    # Set linkMode parameter for API request if itemtype is attachment
    if itemtype == "attachment":
        params["linkMode"] = linkmode
    self.add_parameters(**params)
    query_string = "/items/new"
    if self.templates.get(template_name) and not self._updated(
        query_string,
        self.templates[template_name],
        template_name,
    ):
        # serve a deep copy so callers can't mutate the cached template
        return copy.deepcopy(self.templates[template_name]["tmplt"])
    # otherwise perform a normal request and cache the response
    retrieved = self._retrieve_data(query_string)
    return self._cache(retrieved, template_name)
|
|
808
|
+
|
|
809
|
+
def _attachment_template(self, attachment_type):
    """Return a new attachment template of the required type.

    Types: imported_file, imported_url, linked_file, linked_url
    """
    # Delegate to the generic item template with the requested link mode
    link_mode = attachment_type
    return self.item_template("attachment", linkmode=link_mode)
|
|
815
|
+
|
|
816
|
+
def _attachment(self, payload, parentid=None):
    """Create attachments.

    Accepts a list of one or more attachment template dicts
    and an optional parent Item ID. If this is specified,
    attachments are created under this ID.
    """
    # Zupload drives the multi-step upload protocol
    uploader = Zupload(self, payload, parentid)
    return uploader.upload()
|
|
825
|
+
|
|
826
|
+
@ss_wrap
def show_operators(self):
    """Show available saved search operators."""
    # ss_wrap guarantees self.savedsearch exists
    operators = self.savedsearch.operators
    return operators
|
|
830
|
+
|
|
831
|
+
@ss_wrap
def show_conditions(self):
    """Show available saved search conditions."""
    # ss_wrap guarantees self.savedsearch exists
    conditions = self.savedsearch.conditions_operators
    return conditions.keys()
|
|
835
|
+
|
|
836
|
+
@ss_wrap
def show_condition_operators(self, condition):
    """Show available operators for a given saved search condition."""
    # dict keys of allowed operators for the current condition
    permitted = self.savedsearch.conditions_operators.get(condition)
    # transform these into their display values
    return {self.savedsearch.operators.get(key) for key in permitted}
|
|
843
|
+
|
|
844
|
+
@ss_wrap
def saved_search(self, name, conditions):
    """Create a saved search.

    conditions is a list of dicts containing search conditions and must
    contain the following str keys: condition, operator, value

    Returns:
        The decoded JSON response from the server.
    """
    # validate conditions client-side before hitting the API
    self.savedsearch._validate(conditions)
    payload = [{"name": name, "conditions": conditions}]
    # write token makes the POST safe to retry without duplicating
    headers = {"Zotero-Write-Token": token()}
    self._check_backoff()
    req = self.client.post(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/searches",
        ),
        headers=headers,
        json=payload,
    )
    self.request = req
    try:
        req.raise_for_status()
    except httpx.HTTPError as exc:
        error_handler(self, req, exc)
    # honour any server-requested backoff before subsequent requests
    backoff = get_backoff_duration(self.request.headers)
    if backoff:
        self._set_backoff(backoff)
    return req.json()
|
|
872
|
+
|
|
873
|
+
@ss_wrap
def delete_saved_search(self, keys):
    """Delete one or more saved searches.

    Pass a list of one or more unique search keys.

    Returns:
        The HTTP status code of the delete response.
    """
    # write token makes the request safe to retry
    headers = {"Zotero-Write-Token": token()}
    self._check_backoff()
    req = self.client.delete(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/searches",
        ),
        headers=headers,
        # the API accepts multiple keys as a comma-separated list
        params={"searchKey": ",".join(keys)},
    )
    self.request = req
    try:
        req.raise_for_status()
    except httpx.HTTPError as exc:
        error_handler(self, req, exc)
    # honour any server-requested backoff before subsequent requests
    backoff = get_backoff_duration(self.request.headers)
    if backoff:
        self._set_backoff(backoff)
    return req.status_code
|
|
898
|
+
|
|
899
|
+
def upload_attachments(self, attachments, parentid=None, basedir=None):
    """Upload files to the already created (but never uploaded) attachments."""
    uploader = Zupload(self, attachments, parentid, basedir=basedir)
    return uploader.upload()
|
|
902
|
+
|
|
903
|
+
def add_tags(self, item, *tags):
    """Add one or more tags to a retrieved item, then update it on the server.

    Accepts a dict, and one or more tags to add to it.
    Returns the updated item from the server.
    """
    # Make sure there's a tags field, or add one
    if not item.get("data", {}).get("tags"):
        item["data"]["tags"] = []
    item["data"]["tags"].extend({"tag": f"{tag}"} for tag in tags)
    # make sure everything's OK before writing
    self.check_items([item])
    return self.update_item(item)
|
|
917
|
+
|
|
918
|
+
def check_items(self, items):
    """Check that items to be created contain no invalid dict keys.

    Accepts a single argument: a list of one or more dicts.
    The retrieved fields are cached and re-used until a 304 call fails.

    Returns:
        A list of the validated item dicts (unwrapped to their 'data'
        payloads where applicable).

    Raises:
        ze.InvalidItemFieldsError: if any item contains unknown keys.
    """
    params = {"locale": self.locale, "timeout": DEFAULT_TIMEOUT}
    query_string = "/itemFields"
    # NOTE(review): this request is sent only to obtain the final URL for
    # the cache key below — its body is never read; confirm this round
    # trip is intentional
    r = Request(
        "GET",
        build_url(self.endpoint, query_string),
        params=params,
    )
    response = self.client.send(r)
    # now split up the URL
    result = urlparse(str(response.url))
    # construct cache key
    cachekey = result.path + "_" + result.query
    if self.templates.get(cachekey) and not self._updated(
        query_string,
        self.templates[cachekey],
        cachekey,
    ):
        # cached field list is still fresh
        template = {t["field"] for t in self.templates[cachekey]["tmplt"]}
    else:
        template = {t["field"] for t in self.item_fields()}
    # add fields we know to be OK
    template |= {
        "path",
        "tags",
        "notes",
        "itemType",
        "creators",
        "mimeType",
        "linkMode",
        "note",
        "charset",
        "dateAdded",
        "version",
        "collections",
        "dateModified",
        "relations",
        # attachment items
        "parentItem",
        "mtime",
        "contentType",
        "md5",
        "filename",
        "inPublications",
        # annotation fields
        "annotationText",
        "annotationColor",
        "annotationType",
        "annotationPageLabel",
        "annotationPosition",
        "annotationSortIndex",
        "annotationComment",
        "annotationAuthorName",
    }
    template |= set(self.temp_keys)
    processed_items = []
    for pos, item in enumerate(items):
        # a full API item envelope has exactly these keys; validate its
        # 'data' payload rather than the envelope itself
        if set(item) == {"links", "library", "version", "meta", "key", "data"}:
            itm = item["data"]
        else:
            itm = item
        to_check = set(itm.keys())
        difference = to_check.difference(template)
        if difference:
            err = f"Invalid keys present in item {pos + 1}: {' '.join(i for i in difference)}"
            raise ze.InvalidItemFieldsError(err)
        processed_items.append(itm)
    return processed_items
|
|
991
|
+
|
|
992
|
+
@tcache
def item_types(self):
    """Get all available item types."""
    # tcache performs the request and caching; we only supply the
    # query string and parameters
    return "/itemTypes", {"locale": self.locale}
|
|
999
|
+
|
|
1000
|
+
@tcache
def creator_fields(self):
    """Get localised creator fields."""
    # tcache performs the request and caching; we only supply the
    # query string and parameters
    return "/creatorFields", {"locale": self.locale}
|
|
1007
|
+
|
|
1008
|
+
@tcache
def item_type_fields(self, itemtype):
    """Get all valid fields for an item."""
    # tcache performs the request and caching; we only supply the
    # query string and parameters
    return "/itemTypeFields", {"itemType": itemtype, "locale": self.locale}
|
|
1014
|
+
|
|
1015
|
+
@tcache
def item_creator_types(self, itemtype):
    """Get all available creator types for an item."""
    # tcache performs the request and caching; we only supply the
    # query string and parameters
    return "/itemTypeCreatorTypes", {"itemType": itemtype, "locale": self.locale}
|
|
1021
|
+
|
|
1022
|
+
@tcache
def item_fields(self):
    """Get all available item fields."""
    # tcache performs the request and caching; we only supply the
    # query string and parameters
    return "/itemFields", {"locale": self.locale}
|
|
1029
|
+
|
|
1030
|
+
@staticmethod
def item_attachment_link_modes():
    """Get all available link mode types.

    Note: No viable REST API route was found for this, so I tested and built
    a list from documentation found here:
    https://www.zotero.org/support/dev/web_api/json
    """
    # every combination of {imported, linked} x {file, url}
    return [
        f"{origin}_{target}"
        for origin in ("imported", "linked")
        for target in ("file", "url")
    ]
|
|
1039
|
+
|
|
1040
|
+
def create_items(self, payload, parentid=None, last_modified=None):
    """Create new Zotero items.

    Accepts two arguments:
        a list containing one or more item dicts
        an optional parent item ID.
    Note that this can also be used to update existing items.

    Args:
        payload: list of up to DEFAULT_NUM_ITEMS item dicts.
        parentid: optional parent item key; created items are re-parented
            under it via follow-up PATCH calls.
        last_modified: optional library version for the
            If-Unmodified-Since-Version concurrency check.

    Returns:
        The decoded JSON response of the initial create call.

    Raises:
        ze.TooManyItemsError: if more than DEFAULT_NUM_ITEMS items are passed.
    """
    if len(payload) > DEFAULT_NUM_ITEMS:
        msg = f"You may only create up to {DEFAULT_NUM_ITEMS} items per call"
        raise ze.TooManyItemsError(msg)
    # TODO: strip extra data if it's an existing item
    headers = {"Zotero-Write-Token": token(), "Content-Type": "application/json"}
    if last_modified is not None:
        headers["If-Unmodified-Since-Version"] = str(last_modified)
    # BUGFIX: allow must be a 1-tuple. The previous ("key") is just the
    # string "key", and iterating / set()-ing a string yields single
    # characters {"k", "e", "y"}, so the "key" field name itself was
    # never matched by _cleanup's allow-list.
    to_send = list(self._cleanup(*payload, allow=("key",)))
    self._check_backoff()
    req = self.client.post(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/items",
        ),
        content=json.dumps(to_send),
        headers=headers,
    )
    self.request = req
    try:
        req.raise_for_status()
    except httpx.HTTPError as exc:
        error_handler(self, req, exc)
    resp = req.json()
    # honour any server-requested backoff before subsequent requests
    backoff = get_backoff_duration(self.request.headers)
    if backoff:
        self._set_backoff(backoff)
    if parentid:
        # we need to create child items using PATCH
        # TODO: handle possibility of item creation + failed parent attachment
        uheaders = {
            "If-Unmodified-Since-Version": req.headers["last-modified-version"],
        }
        for value in resp["success"].values():
            payload = {"parentItem": parentid}
            self._check_backoff()
            presp = self.client.patch(
                url=build_url(
                    self.endpoint,
                    f"/{self.library_type}/{self.library_id}/items/{value}",
                ),
                json=payload,
                headers=dict(uheaders),
            )
            self.request = presp
            try:
                presp.raise_for_status()
            except httpx.HTTPError as exc:
                error_handler(self, presp, exc)
            backoff = get_backoff_duration(presp.headers)
            if backoff:
                self._set_backoff(backoff)
    return resp
|
|
1100
|
+
|
|
1101
|
+
def create_collection(self, payload, last_modified=None):
    """Alias for create_collections to preserve backward compatibility."""
    # thin delegation; kept for callers using the older singular name
    return self.create_collections(payload, last_modified=last_modified)
|
|
1104
|
+
|
|
1105
|
+
def create_collections(self, payload, last_modified=None):
    """Create new Zotero collections.

    Accepts one argument, a list of dicts containing the following keys:
    'name': the name of the collection
    'parentCollection': OPTIONAL, the parent collection to which you wish to add this

    Returns:
        The decoded JSON response from the server.

    Raises:
        ze.ParamNotPassedError: if any dict lacks a 'name' key.
    """
    # no point in proceeding if there's no 'name' key
    for item in payload:
        if "name" not in item:
            msg = "The dict you pass must include a 'name' key"
            raise ze.ParamNotPassedError(msg)
        # add a blank 'parentCollection' key if it hasn't been passed
        if "parentCollection" not in item:
            item["parentCollection"] = ""
    # write token makes the POST safe to retry without duplicating
    headers = {"Zotero-Write-Token": token()}
    if last_modified is not None:
        headers["If-Unmodified-Since-Version"] = str(last_modified)
    self._check_backoff()
    req = self.client.post(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/collections",
        ),
        headers=headers,
        content=json.dumps(payload),
    )
    self.request = req
    try:
        req.raise_for_status()
    except httpx.HTTPError as exc:
        error_handler(self, req, exc)
    # honour any server-requested backoff before subsequent requests
    backoff = get_backoff_duration(req.headers)
    if backoff:
        self._set_backoff(backoff)
    return req.json()
|
|
1141
|
+
|
|
1142
|
+
@backoff_check
def update_collection(self, payload, last_modified=None):
    """Update a Zotero collection property such as 'name'.

    Accepts one argument, a dict containing collection data retrieved
    using e.g. 'collections()'.
    last_modified optionally overrides the version used for the
    optimistic-concurrency check.
    """
    modified = payload["version"]
    if last_modified is not None:
        modified = last_modified
    key = payload["key"]
    # the server rejects the write if the collection changed since 'modified'
    headers = {"If-Unmodified-Since-Version": str(modified)}
    headers.update({"Content-Type": "application/json"})
    return self.client.put(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/collections/{key}",
        ),
        headers=headers,
        content=json.dumps(payload),
    )
|
|
1163
|
+
|
|
1164
|
+
def attachment_simple(self, files, parentid=None):
    """Add attachments using filenames as title.

    Args:
        files: One or more file paths to add as attachments.
        parentid: Optional Item ID to create child attachments.

    """
    template = self._attachment_template("imported_file")
    prepared = []
    for filepath in files:
        entry = template.copy()
        # the bare filename becomes the attachment title
        entry["title"] = Path(filepath).name
        entry["filename"] = filepath
        prepared.append(entry)
    if parentid:
        return self._attachment(prepared, parentid)
    return self._attachment(prepared)
|
|
1180
|
+
|
|
1181
|
+
def attachment_both(self, files, parentid=None):
    """Add child attachments using title, filename.

    Args:
        files: One or more lists or tuples containing (title, file path).
        parentid: Optional Item ID to create child attachments.

    """
    template = self._attachment_template("imported_file")
    prepared = []
    for pair in files:
        entry = template.copy()
        entry["title"] = pair[0]
        entry["filename"] = pair[1]
        prepared.append(entry)
    if parentid:
        return self._attachment(prepared, parentid)
    return self._attachment(prepared)
|
|
1197
|
+
|
|
1198
|
+
@backoff_check
def update_item(self, payload, last_modified=None):
    """Update an existing item.

    Accepts one argument, a dict containing Item data.
    last_modified optionally overrides the version used for the
    optimistic-concurrency check.
    """
    # validate keys (and unwrap a full API envelope to its 'data' payload)
    to_send = self.check_items([payload])[0]
    modified = payload["version"] if last_modified is None else last_modified
    ident = payload["key"]
    # the server rejects the write if the item changed since 'modified'
    headers = {"If-Unmodified-Since-Version": str(modified)}
    return self.client.patch(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/items/{ident}",
        ),
        headers=headers,
        content=json.dumps(to_send),
    )
|
|
1216
|
+
|
|
1217
|
+
def update_items(self, payload):
    """Update existing items.

    Accepts one argument, a list of dicts containing Item data.

    Returns:
        True once every chunk has been submitted successfully.
    """
    # validate each item's keys before sending
    to_send = [self.check_items([p])[0] for p in payload]
    # the API only accepts 50 items at a time, so we have to split anything longer
    for chunk in chunks(to_send, DEFAULT_NUM_ITEMS):
        self._check_backoff()
        req = self.client.post(
            url=build_url(
                self.endpoint,
                f"/{self.library_type}/{self.library_id}/items/",
            ),
            json=chunk,
        )
        self.request = req
        try:
            req.raise_for_status()
        except httpx.HTTPError as exc:
            error_handler(self, req, exc)
        # honour any server-requested backoff before the next chunk
        backoff = get_backoff_duration(req.headers)
        if backoff:
            self._set_backoff(backoff)
    return True
|
|
1242
|
+
|
|
1243
|
+
def update_collections(self, payload):
    """Update existing collections.

    Accepts one argument, a list of dicts containing Collection data.

    Returns:
        True once every chunk has been submitted successfully.
    """
    # NOTE(review): collections are validated through check_items — this
    # relies on the item-field template accepting collection keys; confirm
    to_send = [self.check_items([p])[0] for p in payload]
    # the API only accepts 50 items at a time, so we have to split anything longer
    for chunk in chunks(to_send, DEFAULT_NUM_ITEMS):
        self._check_backoff()
        req = self.client.post(
            url=build_url(
                self.endpoint,
                f"/{self.library_type}/{self.library_id}/collections/",
            ),
            json=chunk,
        )
        self.request = req
        try:
            req.raise_for_status()
        except httpx.HTTPError as exc:
            error_handler(self, req, exc)
        # honour any server-requested backoff before the next chunk
        backoff = get_backoff_duration(req.headers)
        if backoff:
            self._set_backoff(backoff)
    return True
|
|
1268
|
+
|
|
1269
|
+
@backoff_check
def addto_collection(self, collection, payload):
    """Add item to a collection.

    Accepts two arguments: The collection ID, and an item dict.
    """
    ident = payload["key"]
    modified = payload["version"]
    # add the collection data from the item
    modified_collections = payload["data"]["collections"] + [collection]
    # optimistic-concurrency check against the item's known version
    headers = {"If-Unmodified-Since-Version": str(modified)}
    return self.client.patch(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/items/{ident}",
        ),
        json={"collections": modified_collections},
        headers=headers,
    )
|
|
1288
|
+
|
|
1289
|
+
@backoff_check
def deletefrom_collection(self, collection, payload):
    """Delete an item from a collection.

    Accepts two arguments: The collection ID, and an item dict.
    """
    ident = payload["key"]
    modified = payload["version"]
    # strip the collection data from the item
    modified_collections = [
        c for c in payload["data"]["collections"] if c != collection
    ]
    # optimistic-concurrency check against the item's known version
    headers = {"If-Unmodified-Since-Version": str(modified)}
    return self.client.patch(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/items/{ident}",
        ),
        json={"collections": modified_collections},
        headers=headers,
    )
|
|
1310
|
+
|
|
1311
|
+
@backoff_check
def delete_tags(self, *payload):
    """Delete a group of tags.

    Pass in up to 50 tags, or use *[tags].

    Raises:
        ze.TooManyItemsError: if more than DEFAULT_NUM_ITEMS tags are passed.
    """
    if len(payload) > DEFAULT_NUM_ITEMS:
        msg = f"Only {DEFAULT_NUM_ITEMS} tags or fewer may be deleted"
        raise ze.TooManyItemsError(msg)
    # the API expects multiple tags joined with ' || '
    modified_tags = " || ".join(list(payload))
    # first, get version data by getting one tag
    self.tags(limit=1)
    # use the version from that response for the concurrency check
    headers = {
        "If-Unmodified-Since-Version": self.request.headers[
            "last-modified-version"
        ],
    }
    return self.client.delete(
        url=build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/tags",
        ),
        params={"tag": modified_tags},
        headers=headers,
    )
|
|
1336
|
+
|
|
1337
|
+
@backoff_check
def delete_item(self, payload, last_modified=None):
    """Delete Items from a Zotero library.

    Accepts a single argument:
        a dict containing item data
        OR a list of dicts containing item data
    last_modified optionally overrides the version used for the
    optimistic-concurrency check.
    """
    params = None
    if isinstance(payload, list):
        # bulk delete: all keys in one query parameter
        params = {"itemKey": ",".join([p["key"] for p in payload])}
        if last_modified is not None:
            modified = last_modified
        else:
            # use the first item's version for the whole batch
            modified = payload[0]["version"]
        url = build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/items",
        )
    else:
        ident = payload["key"]
        if last_modified is not None:
            modified = last_modified
        else:
            modified = payload["version"]
        url = build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/items/{ident}",
        )
    headers = {"If-Unmodified-Since-Version": str(modified)}
    return self.client.delete(url=url, params=params, headers=headers)
|
|
1368
|
+
|
|
1369
|
+
@backoff_check
def delete_collection(self, payload, last_modified=None):
    """Delete a Collection from a Zotero library.

    Accepts a single argument:
        a dict containing item data
        OR a list of dicts containing item data
    last_modified optionally overrides the version used for the
    optimistic-concurrency check.
    """
    params = None
    if isinstance(payload, list):
        # bulk delete: all keys in one query parameter
        params = {"collectionKey": ",".join([p["key"] for p in payload])}
        if last_modified is not None:
            modified = last_modified
        else:
            # use the first collection's version for the whole batch
            modified = payload[0]["version"]
        url = build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/collections",
        )
    else:
        ident = payload["key"]
        if last_modified is not None:
            modified = last_modified
        else:
            modified = payload["version"]
        url = build_url(
            self.endpoint,
            f"/{self.library_type}/{self.library_id}/collections/{ident}",
        )
    headers = {"If-Unmodified-Since-Version": str(modified)}
    return self.client.delete(url=url, params=params, headers=headers)
|
|
1400
|
+
|
|
1401
|
+
|
|
1402
|
+
# Public API of this module: only the Zotero client class is exported.
__all__ = ["Zotero"]
|