mapillary-tools 0.13.2__py3-none-any.whl → 0.13.3a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapillary_tools/__init__.py +1 -1
- mapillary_tools/constants.py +1 -1
- mapillary_tools/upload_api_v4.py +68 -61
- mapillary_tools/uploader.py +0 -2
- {mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/METADATA +1 -1
- {mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/RECORD +10 -10
- {mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/WHEEL +1 -1
- {mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/LICENSE +0 -0
- {mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/entry_points.txt +0 -0
- {mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/top_level.txt +0 -0
mapillary_tools/__init__.py
CHANGED
@@ -1 +1 @@
-VERSION = "0.13.2"
+VERSION = "0.13.3a1"
mapillary_tools/constants.py
CHANGED
@@ -45,6 +45,6 @@ GOPRO_GPS_PRECISION = float(os.getenv(_ENV_PREFIX + "GOPRO_GPS_PRECISION", 15))
 # Max number of images per sequence
 MAX_SEQUENCE_LENGTH = int(os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_LENGTH", 1000))
 # Max file size per sequence (sum of image filesizes in the sequence)
-MAX_SEQUENCE_FILESIZE: str = os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_FILESIZE", "
+MAX_SEQUENCE_FILESIZE: str = os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_FILESIZE", "110G")
 # Max number of pixels per sequence (sum of image pixels in the sequence)
 MAX_SEQUENCE_PIXELS: str = os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_PIXELS", "6G")
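
The bumped default caps a sequence's total file size at `110G`. The constant stays a string (like `MAX_SEQUENCE_PIXELS = "6G"`), so downstream code has to turn it into a number before comparing it against accumulated sizes. A minimal sketch of such a parser, assuming `K`/`M`/`G`/`T` suffixes with binary (1024-based) multiples; the helper name and the multiplier convention are assumptions for illustration, not mapillary_tools' actual parsing code:

```python
# Hypothetical helper for illustration only; not mapillary_tools' actual parser.
# Assumes "K"/"M"/"G"/"T" suffixes and binary (1024-based) multiples.
_MULTIPLIERS = {"K": 1024, "M": 1024**2, "G": 1024**3, "T": 1024**4}


def parse_size(value: str) -> int:
    """Convert a size string such as "110G" or "1000" into an integer."""
    value = value.strip().upper()
    if value and value[-1] in _MULTIPLIERS:
        return int(float(value[:-1]) * _MULTIPLIERS[value[-1]])
    return int(value)


assert parse_size("110G") == 110 * 1024**3
assert parse_size("1000") == 1000
```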
mapillary_tools/upload_api_v4.py
CHANGED
@@ -5,6 +5,7 @@ import logging
 import os
 import random
 import typing as T
+import uuid

 import requests

@@ -55,31 +56,31 @@ def _truncate_end(s: _S) -> _S:

 class UploadService:
     user_access_token: str
-    entity_size: int
     session_key: str
     callbacks: T.List[T.Callable[[bytes, T.Optional[requests.Response]], None]]
     cluster_filetype: ClusterFileType
     organization_id: T.Optional[T.Union[str, int]]
     chunk_size: int

+    MIME_BY_CLUSTER_TYPE: T.Dict[ClusterFileType, str] = {
+        ClusterFileType.ZIP: "application/zip",
+        ClusterFileType.BLACKVUE: "video/mp4",
+        ClusterFileType.CAMM: "video/mp4",
+    }
+
     def __init__(
         self,
         user_access_token: str,
         session_key: str,
-        entity_size: int,
         organization_id: T.Optional[T.Union[str, int]] = None,
         cluster_filetype: ClusterFileType = ClusterFileType.ZIP,
         chunk_size: int = DEFAULT_CHUNK_SIZE,
     ):
-        if entity_size <= 0:
-            raise ValueError(f"Expect positive entity size but got {entity_size}")
-
         if chunk_size <= 0:
             raise ValueError("Expect positive chunk size")

         self.user_access_token = user_access_token
         self.session_key = session_key
-        self.entity_size = entity_size
         self.organization_id = organization_id
         # validate the input
         self.cluster_filetype = ClusterFileType(cluster_filetype)
@@ -107,55 +108,66 @@ class UploadService:
         data: T.IO[bytes],
         offset: T.Optional[int] = None,
     ) -> str:
-
-
-
-        entity_type_map: T.Dict[ClusterFileType, str] = {
-            ClusterFileType.ZIP: "application/zip",
-            ClusterFileType.BLACKVUE: "video/mp4",
-            ClusterFileType.CAMM: "video/mp4",
-        }
-
-        entity_type = entity_type_map[self.cluster_filetype]
-
-        data.seek(offset, io.SEEK_CUR)
+        chunks = self._chunkize_byte_stream(data)
+        return self.upload_chunks(chunks, offset=offset)

+    def _chunkize_byte_stream(
+        self, stream: T.IO[bytes]
+    ) -> T.Generator[bytes, None, None]:
         while True:
-            [24 removed lines are not rendered in the source diff]
+            data = stream.read(self.chunk_size)
+            if not data:
+                break
+            yield data
+
+    def _offset_chunks(
+        self, chunks: T.Iterable[bytes], offset: int
+    ) -> T.Generator[bytes, None, None]:
+        assert offset >= 0, f"Expect non-negative offset but got {offset}"
+
+        for chunk in chunks:
+            if offset:
+                if offset < len(chunk):
+                    yield chunk[offset:]
+                    offset = 0
+                else:
+                    offset -= len(chunk)
+            else:
+                yield chunk
+
+    def _attach_callbacks(
+        self, chunks: T.Iterable[bytes]
+    ) -> T.Generator[bytes, None, None]:
+        for chunk in chunks:
+            yield chunk
             for callback in self.callbacks:
-                callback(chunk,
-            # we can assert that offset == self.fetch_offset(session_key)
-            # otherwise, server will throw
+                callback(chunk, None)

-
-
+    def upload_chunks(
+        self,
+        chunks: T.Iterable[bytes],
+        offset: T.Optional[int] = None,
+    ) -> str:
+        if offset is None:
+            offset = self.fetch_offset()

-
-
+        chunks = self._attach_callbacks(self._offset_chunks(chunks, offset))
+
+        headers = {
+            "Authorization": f"OAuth {self.user_access_token}",
+            "Offset": f"{offset}",
+            "X-Entity-Name": self.session_key,
+            "X-Entity-Type": self.MIME_BY_CLUSTER_TYPE[self.cluster_filetype],
+        }
+        url = f"{MAPILLARY_UPLOAD_ENDPOINT}/{self.session_key}"
+        LOG.debug("POST %s HEADERS %s", url, json.dumps(_sanitize_headers(headers)))
+        resp = request_post(
+            url,
+            headers=headers,
+            data=chunks,
+            timeout=UPLOAD_REQUESTS_TIMEOUT,
         )
+        LOG.debug("HTTP response %s: %s", resp.status_code, _truncate_end(resp.content))

         payload = resp.json()
         try:
@@ -209,35 +221,30 @@ class FakeUploadService(UploadService):
         )
         self._error_ratio = 0.1

-    def
+    def upload_chunks(
         self,
-
+        chunks: T.Iterable[bytes],
         offset: T.Optional[int] = None,
     ) -> str:
         if offset is None:
             offset = self.fetch_offset()
+
+        chunks = self._attach_callbacks(self._offset_chunks(chunks, offset))
+
         os.makedirs(self._upload_path, exist_ok=True)
         filename = os.path.join(self._upload_path, self.session_key)
         with open(filename, "ab") as fp:
-
-            while True:
-                chunk = data.read(self.chunk_size)
-                if not chunk:
-                    break
-                # fail here means nothing uploaded
+            for chunk in chunks:
                 if random.random() <= self._error_ratio:
                     raise requests.ConnectionError(
                         f"TEST ONLY: Failed to upload with error ratio {self._error_ratio}"
                     )
                 fp.write(chunk)
-                # fail here means patially uploaded
                 if random.random() <= self._error_ratio:
                     raise requests.ConnectionError(
                         f"TEST ONLY: Partially uploaded with error ratio {self._error_ratio}"
                     )
-
-                callback(chunk, None)
-        return self.session_key
+        return uuid.uuid4().hex

     def finish(self, _: str) -> str:
         return "0"
mapillary_tools/uploader.py
CHANGED
@@ -195,7 +195,6 @@ class Uploader:
             upload_api_v4.FakeUploadService(
                 user_access_token=self.user_items["user_upload_token"],
                 session_key=session_key,
-                entity_size=entity_size,
                 organization_id=self.user_items.get("MAPOrganizationKey"),
                 cluster_filetype=cluster_filetype,
                 chunk_size=self.chunk_size,
@@ -205,7 +204,6 @@ class Uploader:
             upload_service = upload_api_v4.UploadService(
                 user_access_token=self.user_items["user_upload_token"],
                 session_key=session_key,
-                entity_size=entity_size,
                 organization_id=self.user_items.get("MAPOrganizationKey"),
                 cluster_filetype=cluster_filetype,
                 chunk_size=self.chunk_size,
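
With `entity_size` gone from both constructor calls, `Uploader` no longer has to know the payload size up front; the upload service is built from the token, session key and the optional organization/filetype/chunk-size settings alone. A minimal construction sketch with placeholder values (the real values come from `user_items` and the sequence being uploaded):

```python
from mapillary_tools import upload_api_v4

# Placeholder credentials/session name for illustration only.
service = upload_api_v4.UploadService(
    user_access_token="<user_upload_token>",
    session_key="mly_tools_example_sequence.zip",
    organization_id=None,  # or the user's MAPOrganizationKey
    cluster_filetype=upload_api_v4.ClusterFileType.ZIP,
    chunk_size=upload_api_v4.DEFAULT_CHUNK_SIZE,
)

# The new entry point accepts any iterable of byte chunks, e.g.:
# with open("sequence.zip", "rb") as fp:
#     chunks = iter(lambda: fp.read(service.chunk_size), b"")
#     cluster_id = service.upload_chunks(chunks)
```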
{mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/RECORD
CHANGED

@@ -1,8 +1,8 @@
-mapillary_tools/__init__.py,sha256=
+mapillary_tools/__init__.py,sha256=j5y1cIo14OOuS27ROUABVHN9119rEXV-bdoM-z117o8,21
 mapillary_tools/api_v4.py,sha256=zhRtgx3EnzgqtjziRhvFq3ONvsPaB9hROsuKFcf_pFo,5197
 mapillary_tools/authenticate.py,sha256=LCFcs6LqZmXaYkTUEKgGfmqytWdh5v_L3KXB48ojOZ4,3090
 mapillary_tools/config.py,sha256=jCjaK4jJaTY4AV4qf_b_tcxn5LA_uPsEWlGIdm2zw6g,2103
-mapillary_tools/constants.py,sha256=
+mapillary_tools/constants.py,sha256=KKQMwzpN2z7wka6lhZv-agdxNldaEQqvQXu3Kg5XuUk,2453
 mapillary_tools/exceptions.py,sha256=Mh1tgVEFTSMnYEzrl9x7b95fW9Z3SPVD_YMEl7r8I0I,2693
 mapillary_tools/exif_read.py,sha256=F60A0-T8XSBHvFKgVIrUz_ZWKQrTFWrtj3c6siB0IMg,28707
 mapillary_tools/exif_write.py,sha256=3PawLnBOY8Z86TYiA_F4LxRhe5Ui6CTNhxYm9yeJNX8,8786
@@ -18,8 +18,8 @@ mapillary_tools/sample_video.py,sha256=dpdX7bUNEmcrz-3gh3Y3awnTDX66pChbTKuF8qGfe
 mapillary_tools/telemetry.py,sha256=WpBGPF_GMPjM_EFqXIutFtpDFL9wj7yEzGNGnfQZUo8,1255
 mapillary_tools/types.py,sha256=6kww2UdKM6YzabYbc862BYzEWtxL2hhxCRFfeDiUtF0,22074
 mapillary_tools/upload.py,sha256=8dQ3ZWsjau1_xZN3ssjGGkBnLKbKIhjC91-zWstYlD8,24439
-mapillary_tools/upload_api_v4.py,sha256=
-mapillary_tools/uploader.py,sha256=
+mapillary_tools/upload_api_v4.py,sha256=VXIAA_lar4y4RgvNuKpkE7CVl4uWa6kNT59hCVFClSk,8490
+mapillary_tools/uploader.py,sha256=DBHso4QIP5nsZFDynLjkUvytzwpEOPnOlfeDyA6rTBk,14007
 mapillary_tools/utils.py,sha256=VNtK1tAb3Hh8y3P5e5Y3iewREkIoLDa3C2myRYcF2lY,5970
 mapillary_tools/camm/camm_builder.py,sha256=TXZfhu3xGjtrLEWnB14D7aSOrHOoSJef24YSLApiIfY,10631
 mapillary_tools/camm/camm_parser.py,sha256=RaCWeLvS_AyHD6B6wDUu9DAsdfByVHMAPTqEqjtFibE,9734
@@ -67,9 +67,9 @@ mapillary_tools/video_data_extraction/extractors/generic_video_parser.py,sha256=
 mapillary_tools/video_data_extraction/extractors/gopro_parser.py,sha256=IVnTyquSraTUaG9rxbJfVWc1-drdY5PaHn5urh3IBk4,1325
 mapillary_tools/video_data_extraction/extractors/gpx_parser.py,sha256=FNrdnXl48k8I1I5fGwYsClhfFEHVsooRLRboUYECv3I,3811
 mapillary_tools/video_data_extraction/extractors/nmea_parser.py,sha256=raSXavBvP-0LJCB_TwLL0mOv2uHSsB744igTsaKAaGc,658
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
+mapillary_tools-0.13.3a1.dist-info/LICENSE,sha256=l2D8cKfFmmJq_wcVq_JElPJrlvWQOzNWx7gMLINucxc,1292
+mapillary_tools-0.13.3a1.dist-info/METADATA,sha256=3wyt_Iv96Md1JNY358MhdTqO02bb0XKdEoDBMiP3_yw,19760
+mapillary_tools-0.13.3a1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+mapillary_tools-0.13.3a1.dist-info/entry_points.txt,sha256=A3f3LP-BO_P-U8Y29QfpT4jx6Mjk3sXjTi2Yew4bvj8,75
+mapillary_tools-0.13.3a1.dist-info/top_level.txt,sha256=FbDkMgOrt1S70ho1WSBrOwzKOSkJFDwwqFOoY5-527s,16
+mapillary_tools-0.13.3a1.dist-info/RECORD,,
{mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/LICENSE
File without changes

{mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/entry_points.txt
File without changes

{mapillary_tools-0.13.2.dist-info → mapillary_tools-0.13.3a1.dist-info}/top_level.txt
File without changes