rm-api 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rm_api/__init__.py +516 -0
- rm_api/auth.py +74 -0
- rm_api/defaults.py +27 -0
- rm_api/helpers.py +59 -0
- rm_api/models.py +1284 -0
- rm_api/notifications/__init__.py +3 -0
- rm_api/notifications/models.py +179 -0
- rm_api/notifications/websocket.py +50 -0
- rm_api/storage/__init__.py +0 -0
- rm_api/storage/common.py +142 -0
- rm_api/storage/exceptions.py +8 -0
- rm_api/storage/new_sync.py +28 -0
- rm_api/storage/old_sync.py +56 -0
- rm_api/storage/v3.py +467 -0
- rm_api/sync_stages.py +18 -0
- rm_api/templates.py +1 -0
- rm_api/tests/__init__.py +9 -0
- rm_api/tests/common.py +35 -0
- rm_api/tests/files/content/first_page.json +222 -0
- rm_api/tests/files/content/last_page_visited.json +235 -0
- rm_api/tests/files/content/pdf_redirect.json +198 -0
- rm_api/tests/files/content/pdf_zoom_custom.json +334 -0
- rm_api/tests/files/content/pdf_zoom_height.json +254 -0
- rm_api/tests/files/content/pdf_zoom_width.json +334 -0
- rm_api/tests/files/metadata/001.json +15 -0
- rm_api/tests/test_001_models.py +41 -0
- rm_api-1.0.0.dist-info/LICENSE +20 -0
- rm_api-1.0.0.dist-info/METADATA +54 -0
- rm_api-1.0.0.dist-info/RECORD +30 -0
- rm_api-1.0.0.dist-info/WHEEL +4 -0
rm_api/__init__.py
ADDED
|
@@ -0,0 +1,516 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import threading
|
|
5
|
+
import time
|
|
6
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
7
|
+
from traceback import print_exc
|
|
8
|
+
from typing import Dict, List, Union
|
|
9
|
+
|
|
10
|
+
import colorama
|
|
11
|
+
import requests
|
|
12
|
+
from requests.adapters import HTTPAdapter
|
|
13
|
+
from urllib3 import Retry
|
|
14
|
+
|
|
15
|
+
from .auth import MissingTabletLink, get_token, refresh_token
|
|
16
|
+
from .models import DocumentCollection, Document, Metadata, Content, make_uuid, File, make_hash
|
|
17
|
+
from .notifications import handle_notifications
|
|
18
|
+
from .notifications.models import FileSyncProgress, SyncRefresh, DocumentSyncProgress, NewDocuments, APIFatal, \
|
|
19
|
+
DownloadOperation
|
|
20
|
+
from .storage.common import get_document_storage_uri, get_document_notifications_uri
|
|
21
|
+
from .storage.new_sync import get_documents_new_sync, handle_new_api_steps
|
|
22
|
+
from .storage.new_sync import get_root as get_root_new
|
|
23
|
+
from .storage.old_sync import get_documents_old_sync, update_root, RootUploadFailure
|
|
24
|
+
from .storage.old_sync import get_root as get_root_old
|
|
25
|
+
from .storage.v3 import get_documents_using_root, get_file, get_file_contents, make_files_request, put_file, \
|
|
26
|
+
check_file_exists
|
|
27
|
+
from .sync_stages import *
|
|
28
|
+
|
|
29
|
+
colorama.init()
|
|
30
|
+
|
|
31
|
+
DEFAULT_REMARKABLE_URI = "https://webapp.cloud.remarkable.com/"
|
|
32
|
+
DEFAULT_REMARKABLE_DISCOVERY_URI = "https://service-manager-production-dot-remarkable-production.appspot.com/"
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class API:
    """Client for the reMarkable cloud API.

    Owns the HTTP session (with retry/backoff), authentication token,
    document caches, event hooks and sync/notification state.
    """

    # Caches of cloud items, keyed by item uuid.
    document_collections: Dict[str, DocumentCollection]
    documents: Dict[str, Document]

    def __init__(self, require_token: bool = True, token_file_path: str = 'token', sync_file_path: str = 'sync',
                 uri: str = None, discovery_uri: str = None, author_id: str = None, log_file='rm_api.log'):
        # Retry transient HTTP failures (429 rate limiting / 503 unavailable)
        # with exponential backoff on every request through this session.
        self.retry_strategy = Retry(
            total=10,
            backoff_factor=2,
            status_forcelist=(429, 503)
        )
        http_adapter = HTTPAdapter(max_retries=self.retry_strategy)
        self.session = requests.Session()
        self.session.mount("http://", http_adapter)
        self.session.mount("https://", http_adapter)

        self.token_file_path = token_file_path
        if not author_id:
            self.author_id = make_uuid()
        else:
            self.author_id = author_id
        # Environment variables URI / DISCOVERY_URI override the defaults
        # unless explicit arguments were given.
        self.uri = uri or os.environ.get("URI", DEFAULT_REMARKABLE_URI)
        self.discovery_uri = discovery_uri or os.environ.get("DISCOVERY_URI", DEFAULT_REMARKABLE_DISCOVERY_URI)
        self.sync_file_path = sync_file_path
        if self.sync_file_path is not None:
            os.makedirs(self.sync_file_path, exist_ok=True)
        self.last_root = None
        self.offline_mode = False
        self.document_storage_uri = None
        self.document_notifications_uri = None
        self._upload_lock = threading.Lock()
        self._hook_lock = threading.Lock()
        self.sync_notifiers: int = 0
        self.download_operations = []
        self._hook_list = {}  # Used for event hooks
        self._use_new_sync = False
        # noinspection PyTypeChecker
        self.document_collections = {}
        # noinspection PyTypeChecker
        self.documents = {}
        self._token = None
        self.debug = False
        self.ignore_error_protection = False
        self.connected_to_notifications = False
        self.require_token = require_token
        # Normalise both base URIs to always end with a slash.
        if not self.uri.endswith("/"):
            self.uri += "/"
        if not self.discovery_uri.endswith("/"):
            self.discovery_uri += "/"
        token = os.environ.get("TOKEN")
        if token is None:
            if os.path.exists(self.token_file_path):
                # Fix: close the token file instead of leaking the handle
                # (previously `open(...).read()` with no close).
                with open(self.token_file_path) as token_file:
                    token = token_file.read()
                try:
                    self.set_token(token)
                except MissingTabletLink:
                    # The stored token is tablet-scoped; retry as a tablet.
                    self.set_token(token, remarkable=True)
            else:
                self.get_token()
        else:
            self.token = token

        self.log_file = log_file
        self.log_lock = threading.Lock()

        # Set up logging configuration
        logging.basicConfig(filename=self.log_file, level=logging.INFO,
                            format='%(asctime)s - %(message)s',
                            filemode='a')  # 'a' for append mode
        # NOTE(review): get_event_loop() outside a running loop is deprecated
        # since Python 3.10 — consider asyncio.new_event_loop(); kept as-is to
        # preserve behaviour.
        self.loop = asyncio.get_event_loop()
|
|
105
|
+
|
|
106
|
+
@property
def hook_list(self):
    """Read-only view of the registered event hooks (id -> callable)."""
    return self._hook_list
|
|
109
|
+
|
|
110
|
+
def force_stop_all(self):
    """Cancel every in-flight download operation.

    Iterates over a snapshot of the list: cancel_download_operation()
    removes items from self.download_operations, and mutating the list
    while iterating it skipped every other operation in the original.
    """
    for operation in list(self.download_operations):
        self.cancel_download_operation(operation)
|
|
113
|
+
|
|
114
|
+
def begin_download_operation(self, operation: DownloadOperation):
    """Track a new download operation and announce its begin event to hooks."""
    self.download_operations.append(operation)
    self.spread_event(operation.begin_event)
|
|
117
|
+
|
|
118
|
+
def finish_download_operation(self, operation: DownloadOperation):
    """Stop tracking a completed operation and announce its finish event.

    An operation that is no longer tracked (e.g. already cancelled or
    force-stopped) is silently ignored.
    """
    try:
        self.download_operations.remove(operation)
        self.spread_event(operation.finish_event)
    except ValueError:
        # remove() raised: the operation was not tracked — nothing to announce.
        pass
|
|
124
|
+
|
|
125
|
+
def cancel_download_operation(self, operation: DownloadOperation):
    """Cancel a tracked operation and announce its cancel event.

    Raises ValueError if the operation is not currently tracked.
    """
    self.download_operations.remove(operation)
    operation.cancel()
    self.spread_event(operation.cancel_event)
|
|
129
|
+
|
|
130
|
+
def reconnect(self):
    """Reset cached connection state so the next sync re-discovers everything."""
    # Drop every cached/derived connection attribute back to its initial state.
    self.connected_to_notifications = False
    self._use_new_sync = False
    self.offline_mode = False
    self.document_storage_uri = None
    self.document_notifications_uri = None
    # Re-normalise the base URIs to always carry a trailing slash.
    for attr in ('uri', 'discovery_uri'):
        value = getattr(self, attr)
        if not value.endswith('/'):
            setattr(self, attr, value + '/')
|
|
140
|
+
|
|
141
|
+
@property
def use_new_sync(self):
    """Whether this API instance is using the newer sync protocol."""
    return self._use_new_sync
|
|
144
|
+
|
|
145
|
+
def connect_to_notifications(self):
    """Start listening for cloud change notifications (idempotent).

    Does nothing when already connected or when operating offline.
    """
    if self.connected_to_notifications or self.offline_mode:
        return
    # Resolve the notifications endpoint before opening the connection.
    self.check_for_document_notifications()
    handle_notifications(self)
    self.connected_to_notifications = True
|
|
151
|
+
|
|
152
|
+
@use_new_sync.setter
def use_new_sync(self, value):
    # Run the one-time new-API setup only on the False -> True transition.
    if not self._use_new_sync and value:
        handle_new_api_steps(self)
    self._use_new_sync = value
|
|
157
|
+
|
|
158
|
+
@property
def token(self):
    """The current (refreshed) user token, or None before authentication."""
    return self._token
|
|
161
|
+
|
|
162
|
+
def set_token(self, value, remarkable: bool = False):
    """Refresh *value* into a user token and install it on the session.

    No-op when *value* is falsy.  `remarkable=True` requests a
    tablet-scoped token (see auth.refresh_token).
    """
    if not value:
        return
    token = refresh_token(self, value, remarkable)
    self.session.headers["Authorization"] = f"Bearer {token}"
    self._token = token
|
|
168
|
+
|
|
169
|
+
@token.setter
def token(self, value):
    # Delegates so that assigning api.token also refreshes and installs it.
    self.set_token(value)
|
|
172
|
+
|
|
173
|
+
def get_token(self, code: str = None, remarkable: bool = False):
    """Run the pairing flow (auth.get_token) and install the result."""
    self.set_token(get_token(self, code, remarkable), remarkable)
|
|
175
|
+
|
|
176
|
+
def get_documents(self, progress=lambda d, i: None):
    """Populate the document caches using whichever sync protocol is active."""
    self.check_for_document_storage()
    # Dispatch on the active protocol instead of branching inline.
    fetch = get_documents_new_sync if self.use_new_sync else get_documents_old_sync
    fetch(self, progress)
|
|
182
|
+
|
|
183
|
+
def get_root(self):
    """Return the cloud root record via whichever sync protocol is active."""
    self.check_for_document_storage()
    fetch_root = get_root_new if self.use_new_sync else get_root_old
    return fetch_root(self)
|
|
189
|
+
|
|
190
|
+
def spread_event(self, event: object):
    """Deliver *event* to every registered hook, in registration order."""
    with self._hook_lock:
        # NOTE(review): threading.Lock is not reentrant — a hook that calls
        # add_hook/remove_hook/spread_event from this thread would deadlock.
        for hook in self._hook_list.values():
            hook(event)
|
|
194
|
+
|
|
195
|
+
def add_hook(self, hook_id, hook):
    """Register *hook* (callable taking one event) under *hook_id*.

    Replaces any existing hook registered with the same id.
    """
    with self._hook_lock:
        self._hook_list[hook_id] = hook
|
|
198
|
+
|
|
199
|
+
def remove_hook(self, hook_id):
    """Unregister the hook stored under *hook_id*; unknown ids are ignored."""
    with self._hook_lock:
        # pop with a default replaces the try/del/except KeyError dance —
        # same behaviour, single dict operation.
        self._hook_list.pop(hook_id, None)
|
|
205
|
+
|
|
206
|
+
def check_for_document_storage(self):
    """Resolve and cache the document-storage service URI via discovery.

    Leaves self.document_storage_uri untouched when offline, already
    resolved, or when discovery returns nothing.
    """
    if self.offline_mode:
        return
    if not self.document_storage_uri:
        uri = get_document_storage_uri(self)
        if not uri:
            return
        elif uri == 'local.appspot.com':
            # Presumably a sentinel for a local/self-hosted service —
            # reuse the configured base URI (TODO confirm).
            uri = self.uri
        else:
            # Discovery returns a bare host; normalise to https with a slash.
            if not uri.endswith("/"):
                uri += "/"
            uri = f'https://{uri}'

        self.document_storage_uri = uri
|
|
221
|
+
|
|
222
|
+
def upload(self, document: Union[Document, DocumentCollection], callback=None, unload: bool = False):
    """Upload a single document/collection; see upload_many_documents."""
    self.upload_many_documents([document], callback, unload)
|
|
224
|
+
|
|
225
|
+
def upload_many_documents(self, documents: List[Union[Document, DocumentCollection]], callback=None,
                          unload: bool = False):
    """Upload several documents/collections in one serialized sync cycle.

    Emits a FileSyncProgress event that hooks can observe; errors during
    the upload are logged (printed) rather than propagated.  When *unload*
    is True the documents' file data is released afterwards.
    `callback` is currently accepted but unused.
    """
    self.sync_notifiers += 1
    try:
        # Fix: hold the lock via `with` so it is released even if the
        # cleanup below raises (previously a raise in unload_files() left
        # _upload_lock held forever and the notifier count wrong).
        with self._upload_lock:
            upload_event = FileSyncProgress()
            self.spread_event(upload_event)
            upload_event.stage = STAGE_START
            try:
                self._upload_document_contents(documents, upload_event)
            except Exception:
                # Narrowed from a bare except so KeyboardInterrupt /
                # SystemExit still propagate; sync errors stay best-effort.
                print_exc()
            finally:
                upload_event.finished = True
                if unload:
                    for document in documents:
                        document.unload_files()
                time.sleep(.1)
    finally:
        self.sync_notifiers -= 1
|
|
246
|
+
|
|
247
|
+
def delete(self, document: Union[Document, DocumentCollection], callback=None, unload: bool = True):
    """Delete a single document/collection; see delete_many_documents."""
    self.delete_many_documents([document], callback, unload)
|
|
249
|
+
|
|
250
|
+
def delete_many_documents(self, documents: List[Union[Document, DocumentCollection]], callback=None,
                          unload: bool = True):
    """Remove several documents/collections from the cloud root.

    Mirrors upload_many_documents: one serialized sync cycle announced via
    a FileSyncProgress event; errors are logged rather than propagated.
    `callback` is currently accepted but unused.
    """
    self.sync_notifiers += 1
    try:
        # Fix: hold the lock via `with` so it is released even if the
        # cleanup below raises (previously a raise in unload_files() left
        # _upload_lock held forever and the notifier count wrong).
        with self._upload_lock:
            upload_event = FileSyncProgress()
            self.spread_event(upload_event)
            upload_event.stage = STAGE_START
            try:
                self._delete_document_contents(documents, upload_event)
            except Exception:
                # Narrowed from a bare except so KeyboardInterrupt /
                # SystemExit still propagate.
                print_exc()
            finally:
                upload_event.finished = True
                if unload:
                    for document in documents:
                        document.unload_files()
                time.sleep(.1)
    finally:
        self.sync_notifiers -= 1
|
|
269
|
+
|
|
270
|
+
def _upload_document_contents(self, documents: List[Union[Document, DocumentCollection]],
                              progress: FileSyncProgress):
    """Core upload cycle: push changed files, then swap in a new root.

    *progress* is mutated throughout (stage / done / total) so hooks can
    render sync progress.  Statement order matters — see the note below.
    """
    # We need to upload the content, metadata, rm file, file list and update root
    # This is the order that remarkable expects the upload to happen in, anything else and they might detect it as
    # API tampering, so we want to follow their upload cycle
    if self.offline_mode:
        # Nothing to upload offline; report the progress event as complete.
        progress.total = 1
        progress.done = 1
        return

    progress.total += 2  # Getting root / Updating root

    progress.stage = STAGE_GET_ROOT

    root = self.get_root()  # root info

    _, files = get_file(self, root['hash'])
    progress.done += 1  # Got root

    new_root = {
        "broadcast": True,
        "generation": root['generation']
    }

    # One .docSchema index file per document being uploaded (hash filled later).
    document_files = [
        File(
            None,
            document.uuid,
            len(document.files), 0,
            f"{document.uuid}.docSchema"
        )
        for document in documents
    ]

    # New root listing = our documents plus every existing entry we are not replacing.
    uuids = [document.uuid for document in documents]
    new_root_files = document_files + [
        file
        for file in files
        if file.uuid not in uuids
    ]

    # NOTE(review): old_files is populated below but never read afterwards.
    old_files = []
    files_with_changes = []

    progress.stage = STAGE_EXPORT_DOCUMENTS
    for document in documents:
        document.check()
        document.export()
        document.provision = True
        progress.total += len(document.files)
    # Register the uploaded items in the local caches right away.
    self.documents.update({
        document.uuid: document
        for document in documents if isinstance(document, Document)
    })
    self.document_collections.update({
        document_collection.uuid: document_collection
        for document_collection in documents if isinstance(document_collection, DocumentCollection)
    })
    self.spread_event(NewDocuments())

    # Figure out what files have changed
    progress.stage = STAGE_DIFF_CHECK_DOCUMENTS
    for document in documents:
        for file in document.files:
            try:
                exists = check_file_exists(self, file.hash, binary=True, use_cache=False)
                if not exists:
                    files_with_changes.append(file)
                else:
                    old_files.append(file)
            except:
                # NOTE(review): bare except — any failure to check is treated
                # as "changed", forcing a re-upload of that file.
                files_with_changes.append(file)
            finally:
                progress.done += 1

    progress.stage = STAGE_PREPARE_DATA

    # Copy the content data so we can add more files to it
    content_datas = {}
    for document in documents:
        content_datas.update(document.content_data.copy())

    # Update the hash for files that have changed
    for file in files_with_changes:
        if data := content_datas.get(file.uuid):
            file.hash = make_hash(data)
            file.size = len(data)

    # Make a new document file with the updated files for this document

    progress.stage = STAGE_COMPILE_DATA

    for document, document_file in zip(documents, document_files):
        document_file_content = document_file.update_document_file(self, document.files, content_datas)

        # Add the document file to the content_data
        content_datas[document_file.uuid] = document_file_content
        files_with_changes.append(document_file)

    # Prepare the root file
    progress.stage = STAGE_PREPARE_ROOT
    root_file_content, root_file = File.create_root_file(new_root_files)
    new_root['hash'] = root_file.hash

    files_with_changes.append(root_file)
    content_datas[root_file.uuid] = root_file_content

    # Upload all the files that have changed
    document_operations = {}
    progress.stage = STAGE_PREPARE_OPERATIONS

    for document in documents:
        document_sync_operation = DocumentSyncProgress(document.uuid, progress)
        document_operations[document.uuid] = document_sync_operation

    progress.stage = STAGE_UPLOAD

    futures = []
    progress.total += len(files_with_changes)
    with ThreadPoolExecutor(max_workers=4) as executor:
        # NOTE(review): a fresh event loop that is never run or closed —
        # run_in_executor submits to the executor immediately, so uploads do
        # happen, but the loop object leaks; confirm intent.
        loop = asyncio.new_event_loop()  # Get the current event loop
        for file in sorted(files_with_changes, key=lambda f: f.size):
            # Group each file under its document's sync operation; files
            # without one (e.g. the root file) get their own operation.
            if (document_uuid := file.uuid.split('/')[0].split('.')[0]) in document_operations:
                document_operation = document_operations[document_uuid]
            else:
                document_operations[file.uuid] = DocumentSyncProgress(file.uuid, progress)
                document_operation = document_operations[file.uuid]

            if file.uuid.endswith('.content') or file.uuid.endswith('.metadata'):
                file.save_to_cache(self, content_datas[file.uuid])

            # This is where you use run_in_executor to call your async function in a separate thread
            future = loop.run_in_executor(executor, put_file, self, file, content_datas[file.uuid],
                                          document_operation)
            futures.append(future)
        executor.shutdown(wait=True)

    # Wait for operation to finish
    while not all(operation.finished for operation in document_operations.values()):
        time.sleep(.1)

    # Update the root
    progress.stage = STAGE_UPDATE_ROOT
    try:
        update_root(self, new_root)
    except RootUploadFailure:
        # Another device updated root first (stale generation): reset the
        # progress counters and retry the whole upload cycle.
        self.log("Sync root failed, this is fine if you decided to sync on another device / start a secondary sync")
        progress.done = -1
        progress.total = 0
        self._upload_document_contents(documents, progress)
    progress.done += 1  # Update done finally matching done/total

    # Release in-memory file data and refresh availability flags.
    for document in documents:
        document.content_data.clear()
        document.files_available = document.check_files_availability()
        document.provision = False

    # Only the outermost concurrent sync announces the refresh.
    if self.sync_notifiers <= 1:
        self.spread_event(SyncRefresh())
|
|
429
|
+
|
|
430
|
+
def _delete_document_contents(self, documents: List[Union[Document, DocumentCollection]],
                              progress: FileSyncProgress):
    """Unlink *documents* from the cloud root and upload the new root.

    The file blobs themselves are not removed from storage; the documents
    simply disappear from the root listing.
    """
    # We need to remove the documents from the root and upload the new root

    if self.offline_mode:
        # Nothing to do offline; report the progress event as complete.
        progress.total = 1
        progress.done = 1
        return

    progress.total += 2  # Getting root / Updating root

    progress.stage = STAGE_GET_ROOT

    root = self.get_root()  # root info

    _, files = get_file(self, root['hash'])
    progress.done += 1  # Got root

    new_root = {
        "broadcast": True,
        "generation": root['generation']
    }

    uuids = [document.uuid for document in documents]

    new_root_files = [
        file
        for file in files
        if file.uuid not in uuids
    ]  # Include all the old data without this data

    # Prepare the root file
    progress.stage = STAGE_PREPARE_ROOT
    root_sync_operation = DocumentSyncProgress('root', progress)
    root_file_content, root_file = File.create_root_file(new_root_files)
    new_root['hash'] = root_file.hash

    put_file(self, root_file, root_file_content, root_sync_operation)

    # Update the root
    progress.stage = STAGE_UPDATE_ROOT
    try:
        update_root(self, new_root)
    except RootUploadFailure:
        self.log("Sync root failed, this is fine if you decided to sync on another device / start a secondary sync")
        progress.done = -1
        progress.total = 0
        # Fix: retry the DELETE flow on a stale-generation failure.
        # Previously this recursed into _upload_document_contents, which
        # would have re-added the just-deleted documents to the root.
        self._delete_document_contents(documents, progress)
    progress.done += 1  # Update done finally matching done/total

    # Only the outermost concurrent sync announces the refresh.
    if self.sync_notifiers <= 1:
        self.spread_event(SyncRefresh())
|
|
482
|
+
|
|
483
|
+
def check_for_document_notifications(self):
    """Resolve and cache the notifications service URI via discovery.

    Leaves self.document_notifications_uri untouched when already resolved
    or when discovery returns nothing.
    """
    if not self.document_notifications_uri:
        uri = get_document_notifications_uri(self)
        if not uri:
            return
        elif uri == 'local.appspot.com':
            # Presumably a sentinel for a local/self-hosted service —
            # reuse the configured base URI (TODO confirm).
            uri = self.uri
        else:
            # Discovery returns a bare host; normalise to https with a slash.
            if not uri.endswith("/"):
                uri += "/"
            uri = f'https://{uri}'
        self.document_notifications_uri = uri
|
|
495
|
+
|
|
496
|
+
def log(self, *args, enable_print=False):
    """Append the str()-joined *args* to the log file (thread-safe).

    Also echoes to stdout, but only when both self.debug and
    *enable_print* are set.
    """
    with self.log_lock:
        if self.debug and enable_print:
            print(*args)
        logging.info(' '.join(map(str, args)))
|
|
501
|
+
|
|
502
|
+
def reset_root(self):
    """Replace the cloud root with an empty root file.

    Destructive: afterwards the account's root listing contains no files.
    """
    root = self.get_root()

    new_root = {
        "broadcast": True,
        "generation": root.get('generation', 0)
    }

    # An empty schema root file: just the version header line.
    root_file_content = b'3\n'

    # Fix: the original referenced models.File / models.make_hash, but no
    # `models` name is bound in this module (only the individual names are
    # imported at the top), so this raised NameError at runtime.  Also
    # dropped the pointless f-string around a constant file name.
    root_file = File(make_hash(root_file_content), "root.docSchema", 0, len(root_file_content))
    new_root['hash'] = root_file.hash
    put_file(self, root_file, root_file_content, DocumentSyncProgress(''))
    update_root(self, new_root)
    _, files = get_file(self, new_root['hash'])
|
rm_api/auth.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
from typing import TYPE_CHECKING
|
|
2
|
+
from uuid import uuid4
|
|
3
|
+
|
|
4
|
+
import requests
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from rm_api import API
|
|
8
|
+
|
|
9
|
+
TOKEN_URL = "{0}token/json/2/device/new"
|
|
10
|
+
TOKEN_REFRESH_URL = "{0}token/json/2/user/new"
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class FailedToRefreshToken(Exception):
    """Raised when an existing token could not be exchanged for a user token."""
    pass


class FailedToGetToken(Exception):
    """Raised when pairing with a connect code did not yield a token."""
    pass


class MissingTabletLink(Exception):
    """Raised when the refresh response identifies an 'rM device' token but
    the caller did not request a tablet token (remarkable=False)."""
    pass
|
|
22
|
+
|
|
23
|
+
def get_token(api: 'API', code: str = None, remarkable: bool = False):
    """Pair with the reMarkable cloud and return a new device token.

    Prompts on stdin for a connect code when none is given, persists the
    token to api.token_file_path and returns it.  Returns None when the
    API was created with require_token=False and no code was supplied.
    Raises FailedToGetToken on a non-200 response when require_token is
    False; otherwise re-prompts until pairing succeeds.
    """
    if not api.require_token and not code:
        return None
    if not code:
        code = input("Enter your connect code: ")
    response = api.session.post(
        TOKEN_URL.format(api.uri),
        json={
            "code": code,
            # deviceDesc controls whether the cloud treats us as a tablet.
            "deviceDesc": "remarkable" if remarkable else "desktop-windows",
            "deviceID": uuid4().hex,
            "secret": ""
        },
        headers={
            # Pairing is unauthenticated; an empty bearer value is presumably
            # expected by the endpoint — confirm against the API.
            "Authorization": f"Bearer "
        }
    )
    if response.status_code != 200:
        if api.require_token:
            print(f'Got status code {response.status_code}')
            # Fix: keep the `remarkable` flag on retry — the original called
            # get_token(api) and silently fell back to the desktop deviceDesc.
            return get_token(api, remarkable=remarkable)
        else:
            raise FailedToGetToken("Could not get token")

    with open(api.token_file_path, "w") as f:
        f.write(response.text)

    return response.text
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def refresh_token(api: 'API', token: str, remarkable: bool = False):
    """Exchange *token* for a fresh user token.

    Falls back to the pairing flow when no token is given and one is
    required.  On a connection failure or timeout the API is flipped to
    offline mode and None is returned.  Raises MissingTabletLink when the
    cloud answers with a tablet ('rM device') token but remarkable=False,
    and FailedToRefreshToken when the refresh fails and require_token is
    False.
    """
    if not token:
        if api.require_token:
            return refresh_token(api, get_token(api, remarkable=remarkable))
    try:
        response = requests.post(
            TOKEN_REFRESH_URL.format(api.uri),
            headers={"Authorization": f"Bearer {token}"},
            timeout=1,
        )
    except (TimeoutError, requests.exceptions.Timeout, requests.exceptions.ConnectionError):
        # Fix: requests raises ReadTimeout (a subclass of
        # requests.exceptions.Timeout, NOT of builtin TimeoutError or of
        # ConnectionError) when the 1 s timeout elapses mid-response;
        # previously that escaped and crashed instead of going offline.
        api.offline_mode = True
        return None
    if b'rM device' in response.content and not remarkable:
        raise MissingTabletLink("You need to link your tablet first or use remarkable=True")
    if response.status_code != 200:
        if api.require_token:
            return refresh_token(api, get_token(api, remarkable=remarkable), remarkable)
        else:
            raise FailedToRefreshToken("Could not refresh token")

    return response.text
|
rm_api/defaults.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
|
|
3
|
+
# Native reMarkable display resolution in pixels, as (width, height).
RM_SCREEN_SIZE = (1404, 1872)
# Midpoint of the screen, derived from RM_SCREEN_SIZE.
RM_SCREEN_CENTER = tuple(v // 2 for v in RM_SCREEN_SIZE)
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class ZoomModes(Enum):
    """Zoom behaviours; values are the strings stored in document content."""
    BestFit = 'bestFit'  # Default
    CustomFit = 'customFit'
    FitToWidth = 'fitToWidth'
    FitToHeight = 'fitToHeight'
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class FileTypes(Enum):
    """Document file types; values are the strings used by the API."""
    PDF = 'pdf'
    EPUB = 'epub'
    Notebook = 'notebook'
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class Orientations(Enum):
    """Page orientations; values are the strings used by the API."""
    Portrait = 'portrait'
    Landscape = 'landscape'
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class DocumentTypes(Enum):
    """Item kinds; values are the type strings used by the API."""
    Document = 'DocumentType'
    Collection = 'CollectionType'
|
rm_api/helpers.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from functools import wraps
|
|
2
|
+
from io import BytesIO
|
|
3
|
+
from itertools import islice
|
|
4
|
+
from threading import Thread
|
|
5
|
+
from traceback import format_exc
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
from PyPDF2 import PdfReader
|
|
9
|
+
from colorama import Fore
|
|
10
|
+
|
|
11
|
+
from rm_api.notifications.models import DownloadOperation
|
|
12
|
+
from rm_api.storage.common import FileHandle
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:
|
|
15
|
+
from . import API
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def get_pdf_page_count(pdf: bytes):
    """Return the number of pages in a PDF given as raw bytes or a FileHandle."""
    # A FileHandle is file-like already; raw bytes need wrapping first.
    source = pdf if isinstance(pdf, FileHandle) else BytesIO(pdf)
    return len(PdfReader(source).pages)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def threaded(fn):
    """Decorator: run *fn* in a daemon thread.

    The wrapper starts the thread immediately and returns the Thread
    object so callers may join it.
    """
    @wraps(fn)
    def run_in_background(*args, **kwargs):
        background = Thread(target=fn, args=args, kwargs=kwargs, daemon=True)
        background.start()
        return background

    return run_in_background
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def batched(iterable, batch_size):
    """Yield successive lists of at most *batch_size* items from *iterable*."""
    iterator = iter(iterable)
    while True:
        chunk = list(islice(iterator, batch_size))
        if not chunk:
            return
        yield chunk
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def download_operation_wrapper(fn):
    """Decorator: track a call to *fn* as a DownloadOperation on the API.

    The wrapped function is called with an extra `operation` kwarg; an
    optional `ref` kwarg (e.g. a document or collection) is attached to
    the operation.  Cancellation is logged and re-raised.
    """
    @wraps(fn)
    def wrapped(api: 'API', *args, **kwargs):
        ref = kwargs.get('ref')  # Download operation reference, for example document or collection
        operation = DownloadOperation(ref)
        api.begin_download_operation(operation)
        kwargs['operation'] = operation
        try:
            data = fn(api, *args, **kwargs)
        except DownloadOperation.DownloadCancelException:
            api.log(f'DOWNLOAD CANCELLED\n{Fore.LIGHTBLACK_EX}{format_exc()}{Fore.RESET}')
            raise
        # NOTE(review): if fn raises anything other than
        # DownloadCancelException, the operation is never finished or
        # removed from api.download_operations — confirm this is intended.
        operation.finish()
        api.finish_download_operation(operation)
        return data

    return wrapped
|