pyturso 0.1.5rc5-cp310-cp310-macosx_11_0_arm64.whl → 0.4.0rc9-cp310-cp310-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- pyturso-0.4.0rc9.dist-info/METADATA +209 -0
- pyturso-0.4.0rc9.dist-info/RECORD +14 -0
- {pyturso-0.1.5rc5.dist-info → pyturso-0.4.0rc9.dist-info}/WHEEL +1 -1
- turso/__init__.py +19 -7
- turso/_turso.cpython-310-darwin.so +0 -0
- turso/aio/__init__.py +7 -0
- turso/aio/sync/__init__.py +15 -0
- turso/lib.py +926 -0
- turso/lib_aio.py +351 -0
- turso/lib_sync.py +468 -0
- turso/lib_sync_aio.py +93 -0
- turso/sync/__init__.py +15 -0
- turso/worker.py +43 -0
- pyturso-0.1.5rc5.dist-info/METADATA +0 -33
- pyturso-0.1.5rc5.dist-info/RECORD +0 -6
turso/lib_sync.py
ADDED
@@ -0,0 +1,468 @@
+from __future__ import annotations
+
+import os
+import urllib.error
+
+# for HTTP IO
+import urllib.request
+from dataclasses import dataclass
+from typing import Any, Callable, Iterable, Optional, Tuple, Union
+
+from ._turso import (
+    Misuse,
+    PyTursoAsyncOperation,
+    PyTursoAsyncOperationResultKind,
+    PyTursoConnection,
+    PyTursoDatabaseConfig,
+    PyTursoPartialSyncOpts,
+    PyTursoSyncDatabase,
+    PyTursoSyncDatabaseConfig,
+    PyTursoSyncDatabaseStats,
+    PyTursoSyncIoItem,
+    PyTursoSyncIoItemRequestKind,
+    py_turso_sync_new,
+)
+from .lib import Connection as _Connection
+
+# Constants
+_HTTP_CHUNK_SIZE = 64 * 1024  # 64 KiB
+
+
+@dataclass
+class PartialSyncPrefixBootstrap:
+    # Bootstraps DB by fetching first N bytes/pages; enables partial sync
+    length: int
+
+
+@dataclass
+class PartialSyncQueryBootstrap:
+    # Bootstraps DB by fetching pages touched by given SQL query on server
+    query: str
+
+@dataclass
+class PartialSyncOpts:
+    bootstrap_strategy: Union[PartialSyncPrefixBootstrap, PartialSyncQueryBootstrap]
+    segment_size: Optional[int] = None
+    speculative_load: Optional[bool] = None
+
+class _HttpContext:
+    """
+    Resolved network/auth configuration used by sync engine IO handler.
+    remote_url and auth_token can be static strings or callables (evaluated per request).
+    """
+
+    def __init__(
+        self,
+        remote_url: Union[str, Callable[[], Optional[str]]],
+        auth_token: Optional[Union[str, Callable[[], Optional[str]]]],
+        client_name: str,
+    ) -> None:
+        self.remote_url = remote_url
+        self.auth_token = auth_token
+        self.client_name = client_name
+
+    def _eval(self, v: Union[str, Callable[[], Optional[str]]]) -> Optional[str]:
+        if callable(v):
+            return v()
+        return v
+
+    def base_url(self) -> str:
+        url = self._eval(self.remote_url)
+        if not url:
+            raise RuntimeError("remote_url is not available")
+        return url
+
+    def token(self) -> Optional[str]:
+        if self.auth_token is None:
+            return None
+        return self._eval(self.auth_token)
+
+
+def _join_url(base: str, path: str) -> str:
+    if not base:
+        return path
+    if base.endswith("/") and path.startswith("/"):
+        return base[:-1] + path
+    if not base.endswith("/") and not path.startswith("/"):
+        return base + "/" + path
+    return base + path
+
+
+def _headers_iter_to_pairs(headers: Iterable[Tuple[str, str]]) -> list[tuple[str, str]]:
+    pairs: list[tuple[str, str]] = []
+    for h in headers:
+        try:
+            k, v = h
+        except Exception:
+            # best-effort skip invalid headers
+            continue
+        pairs.append((str(k), str(v)))
+    return pairs
+
+
+# ruff: noqa: C901
+def _process_http_item(
+    sync: PyTursoSyncDatabase,
+    io_item: PyTursoSyncIoItem,
+    req_kind: Any,
+    ctx: _HttpContext,
+    current_op: Optional[PyTursoAsyncOperation],
+) -> None:
+    """
+    Execute HTTP request, stream response to sync io completion.
+    """
+    # Access request fields
+    method = req_kind.method
+    path = req_kind.path
+    body: Optional[bytes] = None
+    if req_kind.body is not None:
+        # req_kind.body is PyBytes -> bytes
+        body = bytes(req_kind.body)
+
+    headers_list = []
+    if req_kind.headers is not None:
+        headers_list = _headers_iter_to_pairs(req_kind.headers)  # list[(k,v)]
+
+    try:
+        base_url = ctx.base_url()
+    except Exception as e:
+        io_item.poison(f"remote url unavailable: {e}")
+        return
+
+    # Build full URL
+    url = _join_url(base_url, path)
+
+    # Build request
+    request = urllib.request.Request(url=url, data=body, method=method)
+    # Add provided headers
+    seen_auth = False
+    for k, v in headers_list:
+        request.add_header(k, v)
+        if k.lower() == "authorization":
+            seen_auth = True
+
+    # Add Authorization if not present and token provided
+    token = None
+    try:
+        token = ctx.token()
+    except Exception:
+        # token resolver failure -> bubble up as IO error
+        io_item.poison("auth token resolver failed")
+        return
+
+    if token is None and not seen_auth:
+        # No token provided; some endpoints can be public; proceed without it.
+        pass
+    elif token is not None and not seen_auth:
+        request.add_header("Authorization", f"Bearer {token}")
+
+    # Add a clear user-agent to help server logs
+    if "User-Agent" not in request.headers:
+        request.add_header("User-Agent", f"{ctx.client_name}")
+
+    # Perform request
+    try:
+        with urllib.request.urlopen(request) as resp:
+            status = getattr(resp, "status", None)
+            if status is None:
+                try:
+                    status = resp.getcode()
+                except Exception:
+                    status = 200
+            io_item.status(int(status))
+            # Stream response in chunks
+            while True:
+                chunk = resp.read(_HTTP_CHUNK_SIZE)
+                if not chunk:
+                    break
+                io_item.push_buffer(chunk)
+                if current_op is not None:
+                    # The operation should still be waiting for IO
+                    r = current_op.resume()
+                    # Per contract, while streaming response operation must not finish
+                    # We don't raise if it did, but assert in debug builds
+                    try:
+                        assert r is None
+                    except Exception:
+                        # continue anyway
+                        pass
+            io_item.done()
+    except urllib.error.HTTPError as e:
+        # HTTPError has a response body we may stream to completion
+        status = getattr(e, "code", 500)
+        io_item.status(int(status))
+        try:
+            # e.read() may not be available in all Python versions; use e.fp if present
+            stream = e
+            # Attempt to read the error body and forward it
+            while True:
+                chunk = stream.read(_HTTP_CHUNK_SIZE)
+                if not chunk:
+                    break
+                io_item.push_buffer(chunk)
+                if current_op is not None:
+                    r = current_op.resume()
+                    try:
+                        assert r is None
+                    except Exception:
+                        pass
+        except Exception:
+            # ignore body read failures
+            pass
+        finally:
+            io_item.done()
+    except urllib.error.URLError as e:
+        io_item.poison(f"network error: {e.reason}")
+    except Exception as e:
+        io_item.poison(f"http error: {e}")
+
+
+def _process_full_read_item(io_item: PyTursoSyncIoItem, req_kind: Any) -> None:
+    """
+    Fulfill full file read request by streaming file content if exists.
+    On not found - send empty response (not error).
+    """
+    path = req_kind.path
+    try:
+        with open(path, "rb") as f:
+            while True:
+                chunk = f.read(_HTTP_CHUNK_SIZE)
+                if not chunk:
+                    break
+                io_item.push_buffer(chunk)
+        io_item.done()
+    except FileNotFoundError:
+        # On not found engine expects empty response, not error
+        io_item.done()
+    except Exception as e:
+        io_item.poison(f"fs read error: {e}")
+
+
+def _process_full_write_item(io_item: PyTursoSyncIoItem, req_kind: Any) -> None:
+    """
+    Fulfill full file write request by writing provided content atomically.
+    """
+    path = req_kind.path
+    content: bytes = bytes(req_kind.content) if req_kind.content is not None else b""
+    # Ensure parent directory exists
+    try:
+        parent = os.path.dirname(path)
+        if parent and not os.path.exists(parent):
+            os.makedirs(parent, exist_ok=True)
+    except Exception:
+        # ignore directory creation errors, attempt to write anyway
+        pass
+
+    try:
+        with open(path, "wb") as f:
+            # Write in chunks if content is large
+            view = memoryview(content)
+            offset = 0
+            length = len(view)
+            while offset < length:
+                end = min(offset + _HTTP_CHUNK_SIZE, length)
+                f.write(view[offset:end])
+                offset = end
+        io_item.done()
+    except Exception as e:
+        io_item.poison(f"fs write error: {e}")
+
+
+def _drain_sync_io(
+    sync: PyTursoSyncDatabase,
+    ctx: _HttpContext,
+    *,
+    current_op: Optional[PyTursoAsyncOperation] = None,
+) -> None:
+    """
+    Drain all pending IO items from sync engine queue and process them.
+    """
+    while True:
+        item = sync.take_io_item()
+        try:
+            # tricky: we must do step_io_callbacks even if there is no IO in the queue
+            if item is None:
+                break
+            req = item.request()
+            if req.kind == PyTursoSyncIoItemRequestKind.Http and req.http is not None:
+                _process_http_item(sync, item, req.http, ctx, current_op)
+            elif req.kind == PyTursoSyncIoItemRequestKind.FullRead and req.full_read is not None:
+                _process_full_read_item(item, req.full_read)
+            elif req.kind == PyTursoSyncIoItemRequestKind.FullWrite and req.full_write is not None:
+                _process_full_write_item(item, req.full_write)
+            else:
+                item.poison("unknown io request kind")
+        except Exception as e:
+            # Safety net: poison unexpected failures
+            try:
+                item.poison(f"io processing error: {e}")
+            except Exception:
+                pass
+        finally:
+            # Allow engine to run any post-io callbacks
+            sync.step_io_callbacks()
+
+
+def _run_op(
+    sync: PyTursoSyncDatabase,
+    op: PyTursoAsyncOperation,
+    ctx: _HttpContext,
+) -> Any:
+    """
+    Drive async operation to completion, servicing sync engine IO in between.
+    Returns operation result payload depending on kind:
+    - No: returns None
+    - Connection: returns PyTursoConnection
+    - Changes: returns PyTursoSyncDatabaseChanges
+    - Stats: returns PyTursoSyncDatabaseStats
+    """
+    while True:
+        res = op.resume()
+        if res is None:
+            # Needs IO
+            _drain_sync_io(sync, ctx, current_op=op)
+            continue
+        # Finished
+        if res.kind == PyTursoAsyncOperationResultKind.No:
+            return None
+        if res.kind == PyTursoAsyncOperationResultKind.Connection and res.connection is not None:
+            return res.connection
+        if res.kind == PyTursoAsyncOperationResultKind.Changes and res.changes is not None:
+            return res.changes
+        if res.kind == PyTursoAsyncOperationResultKind.Stats and res.stats is not None:
+            return res.stats
+        # Unexpected; return None
+        return None
+
+
+class ConnectionSync(_Connection):
+    """
+    Synchronized connection that extends regular embedded driver with
+    push/pull and remote bootstrap capabilities.
+    """
+
+    def __init__(
+        self,
+        conn: PyTursoConnection,
+        *,
+        sync: PyTursoSyncDatabase,
+        http_ctx: _HttpContext,
+        isolation_level: Optional[str] = "DEFERRED",
+    ) -> None:
+        # Provide extra_io hook so statements can make progress with sync engine (partial sync)
+        def _extra_io() -> None:
+            _drain_sync_io(sync, http_ctx, current_op=None)
+
+        super().__init__(conn, isolation_level=isolation_level, extra_io=_extra_io)
+        self._sync: PyTursoSyncDatabase = sync
+        self._http_ctx: _HttpContext = http_ctx
+
+    def pull(self) -> bool:
+        """
+        Pull remote changes and apply locally.
+        Returns True if new updates were pulled; False otherwise.
+        """
+        # Wait for changes
+        changes = _run_op(self._sync, self._sync.wait_changes(), self._http_ctx)
+        # determine if empty before applying
+        if changes is None:
+            # Should not happen; treat as no changes
+            return False
+        is_empty = bool(changes.empty())
+        if is_empty:
+            return False
+        # Apply non-empty changes
+        op = self._sync.apply_changes(changes)
+        _run_op(self._sync, op, self._http_ctx)
+        return True
+
+    def push(self) -> None:
+        """
+        Push local changes to remote.
+        """
+        _run_op(self._sync, self._sync.push_changes(), self._http_ctx)
+
+    def checkpoint(self) -> None:
+        """
+        Checkpoint the WAL of the synced database.
+        """
+        _run_op(self._sync, self._sync.checkpoint(), self._http_ctx)
+
+    def stats(self) -> PyTursoSyncDatabaseStats:
+        """
+        Collect stats about the synced database.
+        """
+        stats = _run_op(self._sync, self._sync.stats(), self._http_ctx)
+        return stats
+
+
+def connect_sync(
+    path: str,
+    remote_url: Union[str, Callable[[], Optional[str]]],
+    *,
+    auth_token: Optional[Union[str, Callable[[], Optional[str]]]] = None,
+    client_name: Optional[str] = None,
+    long_poll_timeout_ms: Optional[int] = None,
+    bootstrap_if_empty: bool = True,
+    partial_sync_opts: Optional[PartialSyncOpts] = None,
+    experimental_features: Optional[str] = None,
+    isolation_level: Optional[str] = "DEFERRED",
+) -> ConnectionSync:
+    """
+    Create and open a synchronized database connection.
+
+    - path: path to the main database file locally
+    - remote_url: remote url for the sync - can be lambda evaluated on every http request
+    - auth_token: optional token or lambda returning token, used as Authorization: Bearer <token>
+    - client_name: optional unique client name (defaults to 'turso-sync-py')
+    - long_poll_timeout_ms: timeout for long polling during pull
+    - bootstrap_if_empty: if True and db empty, bootstrap from remote during create()
+    - partial_sync_opts: optional partial sync configuration
+    - experimental_features, isolation_level: passed to underlying connection
+    """
+    # Resolve client name
+    cname = client_name or "turso-sync-py"
+    http_ctx = _HttpContext(remote_url=remote_url, auth_token=auth_token, client_name=cname)
+
+    # Database config: async_io must be True to let Python drive IO
+    db_cfg = PyTursoDatabaseConfig(
+        path=path,
+        experimental_features=experimental_features,
+        async_io=True,
+    )
+
+    # Sync config with optional partial bootstrap strategy
+    prefix_len: Optional[int] = None
+    query_str: Optional[str] = None
+    if partial_sync_opts is not None and isinstance(partial_sync_opts.bootstrap_strategy, PartialSyncPrefixBootstrap):
+        prefix_len = int(partial_sync_opts.bootstrap_strategy.length)
+    elif partial_sync_opts is not None and isinstance(partial_sync_opts.bootstrap_strategy, PartialSyncQueryBootstrap):
+        query_str = str(partial_sync_opts.bootstrap_strategy.query)
+
+    sync_cfg = PyTursoSyncDatabaseConfig(
+        path=path,
+        client_name=cname,
+        long_poll_timeout_ms=long_poll_timeout_ms,
+        bootstrap_if_empty=bootstrap_if_empty,
+        reserved_bytes=None,
+        partial_sync_opts=PyTursoPartialSyncOpts(
+            bootstrap_strategy_prefix=prefix_len,
+            bootstrap_strategy_query=query_str,
+            segment_size=partial_sync_opts.segment_size,
+            speculative_load=partial_sync_opts.speculative_load
+        ) if partial_sync_opts is not None else None,
+    )
+
+    # Create sync database holder
+    sync_db: PyTursoSyncDatabase = py_turso_sync_new(db_cfg, sync_cfg)
+
+    # Prepare + open the database with create()
+    _run_op(sync_db, sync_db.create(), http_ctx)
+
+    # Connect to obtain PyTursoConnection
+    conn_obj = _run_op(sync_db, sync_db.connect(), http_ctx)
+    if not isinstance(conn_obj, PyTursoConnection):
+        raise Misuse("sync connect did not return a connection")
+
+    # Wrap into ConnectionSync that integrates sync IO into DB operations
+    return ConnectionSync(conn_obj, sync=sync_db, http_ctx=http_ctx, isolation_level=isolation_level)
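For orientation, a minimal usage sketch of the blocking API added above. It assumes the base Connection from turso/lib.py exposes the usual sqlite3-style execute()/commit() methods (that module is not shown in this section); the URL and token are placeholders.

from turso.lib_sync import connect_sync

# Open (and, if the local file is empty, bootstrap) a synced database.
conn = connect_sync(
    "local.db",
    "https://example.turso.io",     # placeholder remote_url; may also be a callable
    auth_token=lambda: "my-token",  # evaluated per HTTP request
    long_poll_timeout_ms=5000,
)

if conn.pull():  # True when new remote changes were applied locally
    print("pulled fresh changes")

conn.execute("CREATE TABLE IF NOT EXISTS kv (k TEXT, v TEXT)")  # assumed sqlite3-style API from turso/lib.py
conn.execute("INSERT INTO kv VALUES ('hello', 'world')")
conn.commit()        # assumed, as above
conn.push()          # upload local changes to the remote
print(conn.stats())  # PyTursoSyncDatabaseStats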
turso/lib_sync_aio.py
ADDED
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+from typing import Callable, Optional, Union, cast
+
+from .lib_aio import (
+    Connection as NonBlockingConnection,
+)
+from .lib_sync import (
+    ConnectionSync as BlockingConnectionSync,
+)
+from .lib_sync import (
+    PartialSyncOpts,
+    PyTursoSyncDatabaseStats,
+)
+from .lib_sync import (
+    connect_sync as blocking_connect_sync,
+)
+
+
+class ConnectionSync(NonBlockingConnection):
+    def __init__(self, connector: Callable[[], BlockingConnectionSync]) -> None:
+        # Use the non-blocking driver base - runs a background worker thread
+        # that owns the underlying blocking connection instance.
+        super().__init__(connector)
+
+    async def close(self) -> None:
+        # Ensure worker is shut down and underlying blocking connection closed
+        await super().close()
+
+    # Make ConnectionSync instance awaitable with correct return typing
+    def __await__(self):
+        async def _await_open() -> "ConnectionSync":
+            await self._open_future
+            return self  # the underlying connection is created at this point
+
+        return _await_open().__await__()
+
+    async def __aenter__(self) -> "ConnectionSync":
+        await self
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb) -> None:
+        await self.close()
+
+    # Synchronization API (async wrappers scheduling work on the worker thread)
+
+    async def pull(self) -> bool:
+        # Pull remote changes and apply locally; returns True if any updates were fetched
+        return await self._run(lambda: cast(BlockingConnectionSync, self._conn).pull())  # type: ignore[union-attr]
+
+    async def push(self) -> None:
+        # Push local changes to the remote
+        await self._run(lambda: cast(BlockingConnectionSync, self._conn).push())  # type: ignore[union-attr]
+
+    async def checkpoint(self) -> None:
+        # Checkpoint the WAL of the synced database
+        await self._run(lambda: cast(BlockingConnectionSync, self._conn).checkpoint())  # type: ignore[union-attr]
+
+    async def stats(self) -> PyTursoSyncDatabaseStats:
+        # Collect stats about the synced database
+        return await self._run(lambda: cast(BlockingConnectionSync, self._conn).stats())  # type: ignore[union-attr]
+
+
+# connect is not async because it returns awaitable ConnectionSync
+# Same signature as in the lib_sync.connect_sync
+def connect_sync(
+    path: str,
+    remote_url: Union[str, Callable[[], Optional[str]]],
+    *,
+    auth_token: Optional[Union[str, Callable[[], Optional[str]]]] = None,
+    client_name: Optional[str] = None,
+    long_poll_timeout_ms: Optional[int] = None,
+    bootstrap_if_empty: bool = True,
+    partial_sync_opts: Optional[PartialSyncOpts] = None,
+    experimental_features: Optional[str] = None,
+    isolation_level: Optional[str] = "DEFERRED",
+) -> ConnectionSync:
+    # Connector creating the blocking synchronized connection in the worker thread
+    def _connector() -> BlockingConnectionSync:
+        return blocking_connect_sync(
+            path,
+            remote_url,
+            auth_token=auth_token,
+            client_name=client_name,
+            long_poll_timeout_ms=long_poll_timeout_ms,
+            bootstrap_if_empty=bootstrap_if_empty,
+            partial_sync_opts=partial_sync_opts,
+            experimental_features=experimental_features,
+            isolation_level=isolation_level,
+        )
+
+    # Return awaitable async wrapper with sync extras
+    return ConnectionSync(_connector)
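A matching asyncio sketch of the awaitable wrapper above, using only the methods defined in this hunk; the URL and token are again placeholders.

import asyncio

from turso.lib_sync_aio import connect_sync


async def main() -> None:
    # __aenter__ awaits the open future; __aexit__ closes the worker-backed connection.
    async with connect_sync(
        "local.db",
        "https://example.turso.io",  # placeholder remote_url
        auth_token="my-token",       # placeholder token
    ) as conn:
        if await conn.pull():        # runs the blocking pull() on the worker thread
            print("pulled fresh changes")
        await conn.push()
        print(await conn.stats())


asyncio.run(main())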
turso/sync/__init__.py
ADDED
@@ -0,0 +1,15 @@
+from ..lib_sync import (
+    PartialSyncOpts,
+    PartialSyncPrefixBootstrap,
+    PartialSyncQueryBootstrap,
+)
+from ..lib_sync import (
+    connect_sync as connect,
+)
+
+__all__ = [
+    "connect",
+    "PartialSyncOpts",
+    "PartialSyncPrefixBootstrap",
+    "PartialSyncQueryBootstrap",
+]
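A short sketch of the public turso.sync namespace re-exported above, here combined with a partial-sync bootstrap; the URL and sizes are illustrative values, not defaults from the package.

from turso.sync import PartialSyncOpts, PartialSyncPrefixBootstrap, connect

conn = connect(
    "local.db",
    "https://example.turso.io",  # placeholder remote_url
    partial_sync_opts=PartialSyncOpts(
        bootstrap_strategy=PartialSyncPrefixBootstrap(length=1024 * 1024),  # fetch first N bytes/pages
        segment_size=64 * 1024,   # optional
        speculative_load=True,    # optional
    ),
)
conn.pull()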
turso/worker.py
ADDED
@@ -0,0 +1,43 @@
+import asyncio
+from queue import SimpleQueue
+from threading import Thread
+from typing import Any, Callable
+
+STOP_RUNNING_SENTINEL = object()
+
+class Worker(Thread):
+    """
+    Dedicated worker thread executing database operations sequentially.
+
+    The worker consumes (future, callable) items from the unbounded SimpleQueue.
+    It executes the callable, then sets result or mapped exception on the future
+    using loop.call_soon_threadsafe to synchronize with the event loop thread.
+
+    If work item return STOP_RUNNING_SENTINEL value - it stops the execution
+    (e.g. this can be used to stop worker when connection is about to close)
+    """
+
+    def __init__(
+        self,
+        queue: SimpleQueue[tuple[asyncio.Future, Callable[[], Any]] | None],
+        loop: asyncio.AbstractEventLoop,
+    ) -> None:
+        super().__init__(name="turso-async-worker", daemon=True)
+        self._queue = queue
+        self._loop = loop
+
+    def run(self) -> None:
+        while True:
+            item = self._queue.get()
+            fut, func = item
+            if fut.cancelled():
+                # Still consume but skip execution if already cancelled
+                continue
+            try:
+                result = func()
+                if result is STOP_RUNNING_SENTINEL:
+                    break
+            except Exception as e:
+                self._loop.call_soon_threadsafe(fut.set_exception, e)
+            else:
+                self._loop.call_soon_threadsafe(fut.set_result, result)
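A small sketch of how this Worker is meant to be driven; the real wiring presumably lives in turso/lib_aio.py (not shown in this section), so the queue setup below is illustrative only.

import asyncio
from queue import SimpleQueue

from turso.worker import STOP_RUNNING_SENTINEL, Worker


async def main() -> None:
    loop = asyncio.get_running_loop()
    queue: SimpleQueue = SimpleQueue()
    Worker(queue, loop).start()

    # The event-loop side enqueues (future, callable) pairs and awaits the future;
    # the worker resolves it via loop.call_soon_threadsafe.
    fut = loop.create_future()
    queue.put((fut, lambda: 21 * 2))  # any blocking callable runs on the worker thread
    print(await fut)                  # -> 42

    # A callable returning the sentinel stops the worker loop (used when closing).
    queue.put((loop.create_future(), lambda: STOP_RUNNING_SENTINEL))


asyncio.run(main())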
pyturso-0.1.5rc5.dist-info/METADATA
REMOVED
@@ -1,33 +0,0 @@
-Metadata-Version: 2.4
-Name: pyturso
-Version: 0.1.5rc5
-Classifier: Development Status :: 3 - Alpha
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
-Classifier: Programming Language :: Rust
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: POSIX :: Linux
-Classifier: Operating System :: Microsoft :: Windows
-Classifier: Operating System :: MacOS
-Classifier: Topic :: Database
-Classifier: Topic :: Software Development :: Libraries
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: Database :: Database Engines/Servers
-Requires-Dist: typing-extensions>=4.6.0,!=4.7.0
-Requires-Dist: mypy==1.11.0 ; extra == 'dev'
-Requires-Dist: pytest==8.3.1 ; extra == 'dev'
-Requires-Dist: pytest-cov==5.0.0 ; extra == 'dev'
-Requires-Dist: ruff==0.5.4 ; extra == 'dev'
-Requires-Dist: coverage==7.6.1 ; extra == 'dev'
-Requires-Dist: maturin==1.7.8 ; extra == 'dev'
-Provides-Extra: dev
-Summary: Turso is a work-in-progress, in-process OLTP database management system, compatible with SQLite.
-Requires-Python: >=3.9
-Project-URL: Homepage, https://github.com/tursodatabase/turso
-Project-URL: Source, https://github.com/tursodatabase/turso
pyturso-0.1.5rc5.dist-info/RECORD
REMOVED
@@ -1,6 +0,0 @@
-pyturso-0.1.5rc5.dist-info/METADATA,sha256=D5wB9FjVEkGnPnwA3YhZQbSwObyD-J4wqPozojukXpo,1544
-pyturso-0.1.5rc5.dist-info/WHEEL,sha256=aXz49xVjjC2bkgTnE4xcanfAmG9wdfNG_Q2OldK7oKM,104
-turso/__init__.py,sha256=xXZ01fnUZ6SeCHtoDMrDyDns1gnUUIHvbwb3cAtIexM,499
-turso/_turso.cpython-310-darwin.so,sha256=6ZXWEF15eTASm-mmWLzVkgu19JK_zGPvXx9Ylh0abNw,5020896
-turso/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pyturso-0.1.5rc5.dist-info/RECORD,,