mapillary-tools 0.14.3__py3-none-any.whl → 0.14.4__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
@@ -1 +1 @@
- VERSION = "0.14.3"
+ VERSION = "0.14.4"
@@ -1,24 +1,17 @@
  from __future__ import annotations

- import contextlib
- import dbm
  import json
  import logging
+ import os
+ import sqlite3
  import string
  import threading
  import time
  import typing as T
+ from functools import wraps
  from pathlib import Path

- # dbm modules are dynamically imported, so here we explicitly import dbm.sqlite3 to make sure pyinstaller include it
- # Otherwise you will see: ImportError: no dbm clone found; tried ['dbm.sqlite3', 'dbm.gnu', 'dbm.ndbm', 'dbm.dumb']
- try:
-     import dbm.sqlite3  # type: ignore
- except ImportError:
-     pass
-
-
- from . import constants, types
+ from . import constants, store, types
  from .serializer.description import DescriptionJSONSerializer

  JSONDict = T.Dict[str, T.Union[str, int, float, None]]
@@ -85,103 +78,140 @@ def write_history(
          fp.write(json.dumps(history))


+ def _retry_on_database_lock_error(fn):
+     """
+     Decorator to retry a function if it raises a sqlite3.OperationalError with
+     "database is locked" in the message.
+     """
+
+     @wraps(fn)
+     def wrapper(*args, **kwargs):
+         while True:
+             try:
+                 return fn(*args, **kwargs)
+             except sqlite3.OperationalError as ex:
+                 if "database is locked" in str(ex).lower():
+                     LOG.warning(f"{str(ex)}")
+                     LOG.info("Retrying in 1 second...")
+                     time.sleep(1)
+                 else:
+                     raise ex
+
+     return wrapper
+
+
  class PersistentCache:
-     _lock: contextlib.nullcontext | threading.Lock
+     _lock: threading.Lock

      def __init__(self, file: str):
-         # SQLite3 backend supports concurrent access without a lock
-         if dbm.whichdb(file) == "dbm.sqlite3":
-             self._lock = contextlib.nullcontext()
-         else:
-             self._lock = threading.Lock()
          self._file = file
+         self._lock = threading.Lock()

      def get(self, key: str) -> str | None:
+         if not self._db_existed():
+             return None
+
          s = time.perf_counter()

-         with self._lock:
-             with dbm.open(self._file, flag="c") as db:
-                 value: bytes | None = db.get(key)
+         with store.KeyValueStore(self._file, flag="r") as db:
+             try:
+                 raw_payload: bytes | None = db.get(key)  # data retrieved from db[key]
+             except Exception as ex:
+                 if self._table_not_found(ex):
+                     return None
+                 raise ex

-         if value is None:
+         if raw_payload is None:
              return None

-         payload = self._decode(value)
+         data: JSONDict = self._decode(raw_payload)  # JSON dict decoded from db[key]

-         if self._is_expired(payload):
+         if self._is_expired(data):
              return None

-         file_handle = payload.get("file_handle")
+         cached_value = data.get("value")  # value in the JSON dict decoded from db[key]

          LOG.debug(
              f"Found file handle for {key} in cache ({(time.perf_counter() - s) * 1000:.0f} ms)"
          )

-         return T.cast(str, file_handle)
+         return T.cast(str, cached_value)

-     def set(self, key: str, file_handle: str, expires_in: int = 3600 * 24 * 2) -> None:
+     @_retry_on_database_lock_error
+     def set(self, key: str, value: str, expires_in: int = 3600 * 24 * 2) -> None:
          s = time.perf_counter()

-         payload = {
+         data = {
              "expires_at": time.time() + expires_in,
-             "file_handle": file_handle,
+             "value": value,
          }

-         value: bytes = json.dumps(payload).encode("utf-8")
+         payload: bytes = json.dumps(data).encode("utf-8")

          with self._lock:
-             with dbm.open(self._file, flag="c") as db:
-                 db[key] = value
+             with store.KeyValueStore(self._file, flag="c") as db:
+                 db[key] = payload

          LOG.debug(
              f"Cached file handle for {key} ({(time.perf_counter() - s) * 1000:.0f} ms)"
          )

+     @_retry_on_database_lock_error
      def clear_expired(self) -> list[str]:
-         s = time.perf_counter()
-
          expired_keys: list[str] = []

-         with self._lock:
-             with dbm.open(self._file, flag="c") as db:
-                 if hasattr(db, "items"):
-                     items: T.Iterable[tuple[str | bytes, bytes]] = db.items()
-                 else:
-                     items = ((key, db[key]) for key in db.keys())
+         s = time.perf_counter()

-                 for key, value in items:
-                     payload = self._decode(value)
-                     if self._is_expired(payload):
+         with self._lock:
+             with store.KeyValueStore(self._file, flag="c") as db:
+                 for key, raw_payload in db.items():
+                     data = self._decode(raw_payload)
+                     if self._is_expired(data):
                          del db[key]
                          expired_keys.append(T.cast(str, key))

-         if expired_keys:
-             LOG.debug(
-                 f"Cleared {len(expired_keys)} expired entries from the cache ({(time.perf_counter() - s) * 1000:.0f} ms)"
-             )
+         LOG.debug(
+             f"Cleared {len(expired_keys)} expired entries from the cache ({(time.perf_counter() - s) * 1000:.0f} ms)"
+         )

          return expired_keys

-     def keys(self):
-         with self._lock:
-             with dbm.open(self._file, flag="c") as db:
-                 return db.keys()
+     def keys(self) -> list[str]:
+         if not self._db_existed():
+             return []

-     def _is_expired(self, payload: JSONDict) -> bool:
-         expires_at = payload.get("expires_at")
+         try:
+             with store.KeyValueStore(self._file, flag="r") as db:
+                 return [key.decode("utf-8") for key in db.keys()]
+         except Exception as ex:
+             if self._table_not_found(ex):
+                 return []
+             raise ex
+
+     def _is_expired(self, data: JSONDict) -> bool:
+         expires_at = data.get("expires_at")
          if isinstance(expires_at, (int, float)):
              return expires_at is None or expires_at <= time.time()
          return False

-     def _decode(self, value: bytes) -> JSONDict:
+     def _decode(self, raw_payload: bytes) -> JSONDict:
          try:
-             payload = json.loads(value.decode("utf-8"))
+             data = json.loads(raw_payload.decode("utf-8"))
          except json.JSONDecodeError as ex:
              LOG.warning(f"Failed to decode cache value: {ex}")
              return {}

-         if not isinstance(payload, dict):
-             LOG.warning(f"Invalid cache value format: {payload}")
+         if not isinstance(data, dict):
+             LOG.warning(f"Invalid cache value format: {raw_payload!r}")
              return {}

-         return payload
+         return data
+
+     def _db_existed(self) -> bool:
+         return os.path.exists(self._file)
+
+     def _table_not_found(self, ex: Exception) -> bool:
+         if isinstance(ex, sqlite3.OperationalError):
+             if "no such table" in str(ex):
+                 return True
+         return False
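As a quick orientation, here is a minimal, hypothetical usage sketch of the reworked PersistentCache above. The cache path and keys are illustrative only; real callers derive the path via _build_upload_cache_path() in uploader.py (see the last hunk below).

import tempfile
from pathlib import Path

from mapillary_tools.history import PersistentCache

# Illustrative location only; mapillary_tools derives the real path per Python/CLI version
cache_file = str(Path(tempfile.gettempdir()) / "mapillary_upload_cache_example")

cache = PersistentCache(cache_file)

# set() stores a JSON payload with an "expires_at" timestamp (default: two days out)
cache.set("md5:0123abcd", "fh:example-file-handle")

# get() returns None for missing keys, expired entries, or a not-yet-created database
print(cache.get("md5:0123abcd"))  # "fh:example-file-handle"
print(cache.get("missing-key"))   # None

# clear_expired() deletes entries whose expires_at has passed and returns their keys
print(cache.clear_expired())      # []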
@@ -0,0 +1,128 @@
+ """
+ This module provides a persistent key-value store based on SQLite.
+
+ This implementation is mostly copied from dbm.sqlite3 in the Python standard library,
+ but works for Python >= 3.9, whereas dbm.sqlite3 is only available for Python 3.13.
+
+ Source: https://github.com/python/cpython/blob/3.13/Lib/dbm/sqlite3.py
+ """
+
+ import os
+ import sqlite3
+ import sys
+ from collections.abc import MutableMapping
+ from contextlib import closing, suppress
+ from pathlib import Path
+
+ BUILD_TABLE = """
+ CREATE TABLE IF NOT EXISTS Dict (
+     key BLOB UNIQUE NOT NULL,
+     value BLOB NOT NULL
+ )
+ """
+ GET_SIZE = "SELECT COUNT (key) FROM Dict"
+ LOOKUP_KEY = "SELECT value FROM Dict WHERE key = CAST(? AS BLOB)"
+ STORE_KV = "REPLACE INTO Dict (key, value) VALUES (CAST(? AS BLOB), CAST(? AS BLOB))"
+ DELETE_KEY = "DELETE FROM Dict WHERE key = CAST(? AS BLOB)"
+ ITER_KEYS = "SELECT key FROM Dict"
+
+
+ def _normalize_uri(path):
+     path = Path(path)
+     uri = path.absolute().as_uri()
+     while "//" in uri:
+         uri = uri.replace("//", "/")
+     return uri
+
+
+ class KeyValueStore(MutableMapping):
+     def __init__(self, path, /, *, flag="r", mode=0o666):
+         """Open a key-value database and return the object.
+
+         The 'path' parameter is the name of the database file.
+
+         The optional 'flag' parameter can be one of ...:
+             'r' (default): open an existing database for read only access
+             'w': open an existing database for read/write access
+             'c': create a database if it does not exist; open for read/write access
+             'n': always create a new, empty database; open for read/write access
+
+         The optional 'mode' parameter is the Unix file access mode of the database;
+         only used when creating a new database. Default: 0o666.
+         """
+         path = os.fsdecode(path)
+         if flag == "r":
+             flag = "ro"
+         elif flag == "w":
+             flag = "rw"
+         elif flag == "c":
+             flag = "rwc"
+             Path(path).touch(mode=mode, exist_ok=True)
+         elif flag == "n":
+             flag = "rwc"
+             Path(path).unlink(missing_ok=True)
+             Path(path).touch(mode=mode)
+         else:
+             raise ValueError(f"Flag must be one of 'r', 'w', 'c', or 'n', not {flag!r}")
+
+         # We use the URI format when opening the database.
+         uri = _normalize_uri(path)
+         uri = f"{uri}?mode={flag}"
+
+         if sys.version_info >= (3, 12):
+             # This is the preferred way, but only available in Python 3.12 and newer.
+             self._cx = sqlite3.connect(uri, autocommit=True, uri=True)
+         else:
+             self._cx = sqlite3.connect(uri, uri=True)
+
+         # This is an optimization only; it's ok if it fails.
+         with suppress(sqlite3.OperationalError):
+             self._cx.execute("PRAGMA journal_mode = wal")
+
+         if flag == "rwc":
+             self._execute(BUILD_TABLE)
+
+     def _execute(self, *args, **kwargs):
+         if sys.version_info >= (3, 12):
+             return closing(self._cx.execute(*args, **kwargs))
+         else:
+             # Use a context manager to commit the changes
+             with self._cx:
+                 return closing(self._cx.execute(*args, **kwargs))
+
+     def __len__(self):
+         with self._execute(GET_SIZE) as cu:
+             row = cu.fetchone()
+             return row[0]
+
+     def __getitem__(self, key):
+         with self._execute(LOOKUP_KEY, (key,)) as cu:
+             row = cu.fetchone()
+         if not row:
+             raise KeyError(key)
+         return row[0]
+
+     def __setitem__(self, key, value):
+         self._execute(STORE_KV, (key, value))
+
+     def __delitem__(self, key):
+         with self._execute(DELETE_KEY, (key,)) as cu:
+             if not cu.rowcount:
+                 raise KeyError(key)
+
+     def __iter__(self):
+         with self._execute(ITER_KEYS) as cu:
+             for row in cu:
+                 yield row[0]
+
+     def close(self):
+         self._cx.close()
+
+     def keys(self):
+         return list(super().keys())
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, *args):
+         self.close()
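For reference, a small hypothetical sketch of using the new mapillary_tools.store.KeyValueStore directly (the file name and keys below are illustrative); it follows the dbm-style flag semantics documented in the docstring above, storing keys and values as SQLite BLOBs.

from mapillary_tools.store import KeyValueStore

# flag="c" creates the database file and its Dict table if they do not exist yet
with KeyValueStore("example_cache.sqlite3", flag="c") as db:
    db[b"greeting"] = b"hello"  # keys and values are stored as BLOBs
    print(len(db))              # 1
    print(db[b"greeting"])      # b'hello'

# flag="r" reopens the same file read-only (SQLite URI mode=ro)
with KeyValueStore("example_cache.sqlite3", flag="r") as db:
    print(db.keys())            # [b'greeting']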
@@ -1311,7 +1311,7 @@ def _is_uuid(key: str) -> bool:


  def _build_upload_cache_path(upload_options: UploadOptions) -> Path:
-     # Different python/CLI versions use different cache (dbm) formats.
+     # Different python/CLI versions use different cache formats.
      # Separate them to avoid conflicts
      py_version_parts = [str(part) for part in sys.version_info[:3]]
      version = f"py_{'_'.join(py_version_parts)}_{VERSION}"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mapillary_tools
- Version: 0.14.3
+ Version: 0.14.4
  Summary: Mapillary Image/Video Import Pipeline
  Author-email: Mapillary <support@mapillary.com>
  License: BSD
@@ -1,4 +1,4 @@
- mapillary_tools/__init__.py,sha256=-BdvXvwpHU687pG7vjnbfxs01J_K3Vy-_CnU53ctcY0,19
+ mapillary_tools/__init__.py,sha256=SlLq3LO5ZN4Ap8_XR8x6nXyckJ4qcIktiWVQg34oqMY,19
  mapillary_tools/api_v4.py,sha256=bckAU_atUs0pSuqySeY4W0Rs011a21ClJHo_mbbcXXw,4864
  mapillary_tools/authenticate.py,sha256=mmaOwjQ444DcX4lRw2ms3naBg5Y_xwIJAIWeVdsQfqM,11742
  mapillary_tools/blackvue_parser.py,sha256=ea2JtU9MWU6yB0bQlF970_Of0bJVofSTRq1P30WKW-0,5623
@@ -12,17 +12,18 @@ mapillary_tools/exiftool_read_video.py,sha256=23O_bjUOVq6j7i3xMz6fY-XIEsjinsCejK
  mapillary_tools/exiftool_runner.py,sha256=g4gSyqeh3D6EnMJ-c3s-RnO2EP_jD354Qkaz0Y-4D04,1658
  mapillary_tools/ffmpeg.py,sha256=akpvvsjAR-Iiv-hOrUoJvPM9vUU3JqMQ5HJL1_NgwB8,22908
  mapillary_tools/geo.py,sha256=mWaESfDf_zHmyvnt5aVFro4FGrjiULNsuZ6HfGUWvSA,11009
- mapillary_tools/history.py,sha256=zyXYXB8pO9Buffn-8-Ien4s74hGD7fyPr2QpBeZwEWw,5478
+ mapillary_tools/history.py,sha256=MoJVp2D-JUPoORDvNhGt-2dgBstPLZ4nyPToIuqIAg4,6287
  mapillary_tools/http.py,sha256=-df_oGyImO2AOmPnXcKMcztlL4LOZLArE6ki81NMGUA,6411
  mapillary_tools/ipc.py,sha256=DwWQb9hNshx0bg0Fo5NjY0mXjs-FkbR6tIQmjMgMtmg,1089
  mapillary_tools/process_geotag_properties.py,sha256=3EaVvjfKB-O38OjopBcxeEdP6qI5IPIxqmO6isjcXKM,14205
  mapillary_tools/process_sequence_properties.py,sha256=n4VjQHrgVjksIr3WoBviRhrQIBBDHGXMClolfyz6tu4,24057
  mapillary_tools/sample_video.py,sha256=pKSj1Vc8e5p1XGjykBuKY9XieTOskc-9L3F4L407jDM,13935
+ mapillary_tools/store.py,sha256=dA1D0afDvhVm0MYEI1yA5FfqjQM5etYK4pcfMc4nKAU,4077
  mapillary_tools/telemetry.py,sha256=lL6qQbtOZft4DZZrCNK3njlwHT_30zLyYS_YRN5pgHY,1568
  mapillary_tools/types.py,sha256=pIU2wcxiOUWT5Pd05pgNzY9EVEDlwoldtlF2IIYYvE0,5909
  mapillary_tools/upload.py,sha256=XejAgmVW4Y33MiQ2g-shvHZA_zXTekEsOUHUHNx2AE4,24047
  mapillary_tools/upload_api_v4.py,sha256=VgOf7RhfUuzmlSBUp5CpekKIJ0xQrC0r-r0Ds9-wU4I,7344
- mapillary_tools/uploader.py,sha256=4bd2YGIAJOK5Jx3ZLIzkLAAfBtU2F708_lTtatJvVas,46642
+ mapillary_tools/uploader.py,sha256=T2BNlncuFibg5RJ7c2qweUXDAg6-zYc-rdgbV_JEKDU,46636
  mapillary_tools/utils.py,sha256=HjTZ01GQv_UNGySaTZ_Mc1Gn_Y0x3knQf7Vh17whDFw,8108
  mapillary_tools/camm/camm_builder.py,sha256=ub6Z9ijep8zAo1NOlU51Gxk95kQ2vfN58YgVCLmNMRk,9211
  mapillary_tools/camm/camm_parser.py,sha256=aNHP65hNXYQBWBTfhaj_S5XYzmAHhjwcAfGhbm83__o,18043
@@ -68,9 +69,9 @@ mapillary_tools/mp4/simple_mp4_builder.py,sha256=9TUGk1hzI6mQFN1P30jwHL3dCYz3Zz7
  mapillary_tools/mp4/simple_mp4_parser.py,sha256=g3vvPhBoNu7anhVzC5_XQCV7IwfRWro1vJ6d6GyDkHE,6315
  mapillary_tools/serializer/description.py,sha256=ECnQxC-1LOgkAKE5qFi9Y2KuCeH8KPUjjNFDiwebjvo,18647
  mapillary_tools/serializer/gpx.py,sha256=_xx6gHjaWHrlXaUpB5GGBrbRKzbExFyIzWWAH-CvksI,4383
- mapillary_tools-0.14.3.dist-info/licenses/LICENSE,sha256=l2D8cKfFmmJq_wcVq_JElPJrlvWQOzNWx7gMLINucxc,1292
- mapillary_tools-0.14.3.dist-info/METADATA,sha256=f-tqdyREvL0ZXxfm_Mao2KdWkLsWhHzglP6S6SYMjTU,22200
- mapillary_tools-0.14.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- mapillary_tools-0.14.3.dist-info/entry_points.txt,sha256=A3f3LP-BO_P-U8Y29QfpT4jx6Mjk3sXjTi2Yew4bvj8,75
- mapillary_tools-0.14.3.dist-info/top_level.txt,sha256=FbDkMgOrt1S70ho1WSBrOwzKOSkJFDwwqFOoY5-527s,16
- mapillary_tools-0.14.3.dist-info/RECORD,,
+ mapillary_tools-0.14.4.dist-info/licenses/LICENSE,sha256=l2D8cKfFmmJq_wcVq_JElPJrlvWQOzNWx7gMLINucxc,1292
+ mapillary_tools-0.14.4.dist-info/METADATA,sha256=DmtexHTWyoTovIswRtvkT638DbdPNXcfVfPcaCflu4w,22200
+ mapillary_tools-0.14.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ mapillary_tools-0.14.4.dist-info/entry_points.txt,sha256=A3f3LP-BO_P-U8Y29QfpT4jx6Mjk3sXjTi2Yew4bvj8,75
+ mapillary_tools-0.14.4.dist-info/top_level.txt,sha256=FbDkMgOrt1S70ho1WSBrOwzKOSkJFDwwqFOoY5-527s,16
+ mapillary_tools-0.14.4.dist-info/RECORD,,
+ mapillary_tools-0.14.4.dist-info/RECORD,,