learning-loop-node 0.13.7__py3-none-any.whl → 0.15.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of learning-loop-node might be problematic.
- learning_loop_node/data_classes/__init__.py +2 -2
- learning_loop_node/data_classes/image_metadata.py +5 -0
- learning_loop_node/data_classes/training.py +3 -2
- learning_loop_node/data_exchanger.py +3 -3
- learning_loop_node/detector/detector_logic.py +8 -5
- learning_loop_node/detector/detector_node.py +105 -44
- learning_loop_node/detector/inbox_filter/relevance_filter.py +11 -9
- learning_loop_node/detector/outbox.py +134 -44
- learning_loop_node/detector/rest/detect.py +3 -3
- learning_loop_node/detector/rest/upload.py +4 -3
- learning_loop_node/helpers/background_tasks.py +78 -0
- learning_loop_node/helpers/run.py +21 -0
- learning_loop_node/node.py +11 -4
- learning_loop_node/tests/annotator/conftest.py +9 -4
- learning_loop_node/tests/annotator/test_annotator_node.py +10 -2
- learning_loop_node/tests/detector/inbox_filter/test_unexpected_observations_count.py +4 -3
- learning_loop_node/tests/detector/test_client_communication.py +1 -23
- learning_loop_node/tests/detector/test_outbox.py +7 -16
- learning_loop_node/tests/detector/test_relevance_filter.py +3 -3
- learning_loop_node/tests/general/conftest.py +8 -2
- learning_loop_node/tests/trainer/conftest.py +2 -2
- learning_loop_node/trainer/trainer_logic_generic.py +16 -4
- {learning_loop_node-0.13.7.dist-info → learning_loop_node-0.15.0.dist-info}/METADATA +35 -38
- {learning_loop_node-0.13.7.dist-info → learning_loop_node-0.15.0.dist-info}/RECORD +25 -23
- {learning_loop_node-0.13.7.dist-info → learning_loop_node-0.15.0.dist-info}/WHEEL +0 -0
learning_loop_node/detector/outbox.py CHANGED

@@ -5,13 +5,15 @@ import logging
 import os
 import shutil
 from asyncio import Task
+from collections import deque
 from dataclasses import asdict
 from datetime import datetime
 from glob import glob
 from io import BufferedReader, TextIOWrapper
 from multiprocessing import Event
 from multiprocessing.synchronize import Event as SyncEvent
-from
+from threading import Lock
+from typing import List, Optional, Tuple, TypeVar, Union

 import aiohttp
 import PIL
@@ -21,14 +23,27 @@ from fastapi.encoders import jsonable_encoder
 from ..data_classes import ImageMetadata
 from ..enums import OutboxMode
 from ..globals import GLOBALS
-from ..helpers import environment_reader
+from ..helpers import environment_reader, run
+
+T = TypeVar('T')


 class Outbox():
+    """
+    Outbox is a class that handles the uploading of images to the learning loop.
+    It uploads images from an internal queue (lifo) in batches of 20 every 5 seconds.
+    It handles upload failures by splitting the upload into two smaller batches until the problematic image is identified - and removed.
+    Any image can be saved to the normal or the priority queue.
+    Images in the priority queue are uploaded first.
+    The total queue length is limited to 1000 images.
+    """
+
     def __init__(self) -> None:
         self.log = logging.getLogger()
         self.path = f'{GLOBALS.data_folder}/outbox'
         os.makedirs(self.path, exist_ok=True)
+        os.makedirs(f'{self.path}/priority', exist_ok=True)
+        os.makedirs(f'{self.path}/normal', exist_ok=True)

         self.log = logging.getLogger()
         host = environment_reader.host()
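For orientation, a minimal usage sketch of the queueing behavior described in the docstring above. It assumes the loop host/organization/project environment that Outbox reads at construction time is configured; 'example.jpg' is a placeholder path, and save() with upload_priority appears further down in this diff.

import asyncio

from learning_loop_node.detector.outbox import Outbox


async def main() -> None:
    outbox = Outbox()  # creates <data_folder>/outbox/priority and /normal
    with open('example.jpg', 'rb') as f:  # placeholder test image
        jpg = f.read()

    # normal queue (LIFO): uploaded in batches of 20 every 5 seconds
    await outbox.save(jpg, tags=['demo'], source='test-camera')
    # priority queue: uploaded before anything in the normal queue
    await outbox.save(jpg, tags=['demo'], source='test-camera', upload_priority=True)

    outbox.ensure_continuous_upload()  # starts the background upload task
    await asyncio.sleep(6)             # let one upload interval pass in this sketch

asyncio.run(main())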
@@ -42,6 +57,8 @@ class Outbox():
         self.log.info('Outbox initialized with target_uri: %s', self.target_uri)

         self.BATCH_SIZE = 20
+        self.MAX_UPLOAD_LENGTH = 1000  # only affects the `upload_folders` list
+        self.UPLOAD_INTERVAL_S = 5
         self.UPLOAD_TIMEOUT_S = 30

         self.shutdown_event: SyncEvent = Event()
@@ -49,15 +66,24 @@ class Outbox():

         self.upload_counter = 0

-
-
-
-
-
-
-
+        self.priority_upload_folders: List[str] = []
+        self.upload_folders: deque[str] = deque()
+        self.folders_lock = Lock()
+
+        for file in glob(f'{self.path}/priority/*'):
+            self.priority_upload_folders.append(file)
+        for file in glob(f'{self.path}/normal/*'):
+            self.upload_folders.append(file)

-
+    async def save(self,
+                   image: bytes,
+                   image_metadata: Optional[ImageMetadata] = None,
+                   tags: Optional[List[str]] = None,
+                   source: Optional[str] = None,
+                   creation_date: Optional[str] = None,
+                   upload_priority: bool = False) -> None:
+
+        if not await run.io_bound(self._is_valid_jpg, image):
             self.log.error('Invalid jpg image')
             return

@@ -66,9 +92,33 @@ class Outbox():
         if not tags:
             tags = []
         identifier = datetime.now().isoformat(sep='_', timespec='microseconds')
-
-
+
+        try:
+            await run.io_bound(self._save_files_to_disk, identifier, image, image_metadata, tags, source, creation_date, upload_priority)
+        except Exception as e:
+            self.log.error('Failed to save files for image %s: %s', identifier, e)
             return
+
+        if upload_priority:
+            self.priority_upload_folders.append(f'{self.path}/priority/{identifier}')
+        else:
+            self.upload_folders.appendleft(f'{self.path}/normal/{identifier}')
+
+        await self._trim_upload_queue()
+
+    def _save_files_to_disk(self,
+                            identifier: str,
+                            image: bytes,
+                            image_metadata: ImageMetadata,
+                            tags: List[str],
+                            source: Optional[str],
+                            creation_date: Optional[str],
+                            upload_priority: bool) -> None:
+        subpath = 'priority' if upload_priority else 'normal'
+        full_path = f'{self.path}/{subpath}/{identifier}'
+        if os.path.exists(full_path):
+            raise FileExistsError(f'Directory with identifier {identifier} already exists')
+
         tmp = f'{GLOBALS.data_folder}/tmp/{identifier}'
         image_metadata.tags = tags
         if self._is_valid_isoformat(creation_date):
@@ -77,6 +127,7 @@ class Outbox():
         image_metadata.created = identifier

         image_metadata.source = source or 'unknown'
+
         os.makedirs(tmp, exist_ok=True)

         with open(tmp + f'/image_{identifier}.json', 'w') as f:
@@ -85,10 +136,34 @@ class Outbox():
         with open(tmp + f'/image_{identifier}.jpg', 'wb') as f:
             f.write(image)

-        if os.path.exists(tmp):
-
-
-
+        if not os.path.exists(tmp):
+            self.log.error('Could not rename %s to %s', tmp, full_path)
+            raise FileNotFoundError(f'Could not rename {tmp} to {full_path}')
+        os.rename(tmp, full_path)
+
+    async def _trim_upload_queue(self) -> None:
+        if len(self.upload_folders) > self.MAX_UPLOAD_LENGTH:
+            excess = len(self.upload_folders) - self.MAX_UPLOAD_LENGTH
+            self.log.info('Dropping %s images from upload list', excess)
+
+            folders_to_delete = []
+            for _ in range(excess):
+                if self.upload_folders:
+                    try:
+                        folder = self.upload_folders.pop()
+                        folders_to_delete.append(folder)
+                    except Exception:
+                        self.log.exception('Failed to get item from upload_folders')
+
+            await run.io_bound(self._delete_folders, folders_to_delete)
+
+    def _delete_folders(self, folders_to_delete: List[str]) -> None:
+        for folder in folders_to_delete:
+            try:
+                shutil.rmtree(folder)
+                self.log.debug('Deleted %s', folder)
+            except Exception:
+                self.log.exception('Failed to delete %s', folder)

     def _is_valid_isoformat(self, date: Optional[str]) -> bool:
         if date is None:
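The trimming above relies on the deque orientation: save() pushes new folders with appendleft(), so pop() during trimming removes from the right, i.e. the entries that have waited longest. A small standalone sketch of that discipline (the limit of 3 is for illustration only; the Outbox uses 1000):

from collections import deque

MAX_UPLOAD_LENGTH = 3  # illustration; Outbox uses 1000

queue = deque()  # newest on the left, oldest on the right
for identifier in ['img-1', 'img-2', 'img-3', 'img-4', 'img-5']:
    queue.appendleft(f'/outbox/normal/{identifier}')

# trim: pop() takes from the right, i.e. the oldest entries are dropped first
dropped = [queue.pop() for _ in range(len(queue) - MAX_UPLOAD_LENGTH)]

print(dropped)      # ['/outbox/normal/img-1', '/outbox/normal/img-2']
print(list(queue))  # ['/outbox/normal/img-5', '/outbox/normal/img-4', '/outbox/normal/img-3']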
@@ -99,10 +174,11 @@ class Outbox():
         except Exception:
             return False

-    def
-
+    def get_upload_folders(self) -> List[str]:
+        with self.folders_lock:
+            return self.priority_upload_folders + list(self.upload_folders)

-    def ensure_continuous_upload(self):
+    def ensure_continuous_upload(self) -> None:
         self.log.debug('start_continuous_upload')
         if self._upload_process_alive():
             self.log.debug('Upload thread already running')
@@ -111,44 +187,58 @@ class Outbox():
         self.shutdown_event.clear()
         self.upload_task = asyncio.create_task(self._continuous_upload())

-    async def _continuous_upload(self):
+    async def _continuous_upload(self) -> None:
         self.log.info('continuous upload started')
         assert self.shutdown_event is not None
         while not self.shutdown_event.is_set():
             await self.upload()
-            await asyncio.sleep(
+            await asyncio.sleep(self.UPLOAD_INTERVAL_S)
         self.log.info('continuous upload ended')

-    async def upload(self):
-        items = self.
+    async def upload(self) -> None:
+        items = self.get_upload_folders()
         if not items:
             self.log.debug('No images found to upload')
             return

         self.log.info('Found %s images to upload', len(items))
-        for i in range(0, len(items), self.BATCH_SIZE):
-            batch_items = items[i:i+self.BATCH_SIZE]
-            if self.shutdown_event.is_set():
-                break
-            try:
-                await self._upload_batch(batch_items)
-            except Exception:
-                self.log.exception('Could not upload files')

-
+        batch_items = items[:self.BATCH_SIZE]
+        try:
+            await self._upload_batch(batch_items)
+        except Exception:
+            self.log.exception('Could not upload files')

-
-
+    async def _clear_item(self, item: str) -> None:
+        try:
+            if item in self.upload_folders:
+                self.upload_folders.remove(item)
+            if item in self.priority_upload_folders:
+                self.priority_upload_folders.remove(item)
+            await run.io_bound(shutil.rmtree, item, ignore_errors=True)
+            self.log.debug('Deleted %s', item)
+        except Exception:
+            self.log.exception('Failed to delete %s', item)
+
+    async def _upload_batch(self, items: List[str]) -> None:
+        """
+        Uploads a batch of images to the server.
+        :param items: List of folders to upload (each folder contains an image and a metadata file)
+        """

         data: List[Tuple[str, Union[TextIOWrapper, BufferedReader]]] = []
         for item in items:
+            if not os.path.exists(item):
+                await self._clear_item(item)
+                continue
             identifier = os.path.basename(item)
             data.append(('files', open(f'{item}/image_{identifier}.json', 'r')))
             data.append(('files', open(f'{item}/image_{identifier}.jpg', 'rb')))

         try:
             async with aiohttp.ClientSession() as session:
-                response = await session.post(self.target_uri, data=data, timeout=self.UPLOAD_TIMEOUT_S)
+                response = await session.post(self.target_uri, data=data, timeout=aiohttp.ClientTimeout(total=self.UPLOAD_TIMEOUT_S))
+                await response.read()
         except Exception:
             self.log.exception('Could not upload images')
             return
@@ -159,23 +249,23 @@ class Outbox():

         if response.status == 200:
             self.upload_counter += len(items)
+            self.log.debug('Uploaded %s images', len(items))
             for item in items:
-
-
-
-
-
-            self.log.info('Uploaded %s images successfully', len(items))
-
-        elif response.status == 422:
+                await self._clear_item(item)
+            self.log.debug('Cleared %s images', len(items))
+            return
+
+        if response.status == 422:
             if len(items) == 1:
                 self.log.error('Broken content in image: %s\n Skipping.', items[0])
-
+                await self._clear_item(items[0])
                 return

             self.log.exception('Broken content in batch. Splitting and retrying')
             await self._upload_batch(items[:len(items)//2])
             await self._upload_batch(items[len(items)//2:])
+        elif response.status == 429:
+            self.log.warning('Too many requests: %s', response.content)
         else:
             self.log.error('Could not upload images: %s', response.content)

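The 422 handling above halves a failing batch and retries each half until a single broken image is isolated and cleared. A reduced sketch of that bisection idea without HTTP; batch_is_ok is a hypothetical stand-in for the server accepting a batch:

from typing import Callable, List


def find_broken_items(items: List[str], batch_is_ok: Callable[[List[str]], bool]) -> List[str]:
    """Return the items identified as broken; mirrors the halving strategy used on HTTP 422."""
    if not items or batch_is_ok(items):
        return []
    if len(items) == 1:
        return items  # single broken item found -> the caller skips and clears it
    half = len(items) // 2
    return find_broken_items(items[:half], batch_is_ok) + find_broken_items(items[half:], batch_is_ok)


batch = [f'img-{i}' for i in range(8)]
broken = find_broken_items(batch, lambda b: 'img-5' not in b)  # 'img-5' plays the broken image
print(broken)  # ['img-5']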
learning_loop_node/detector/rest/detect.py CHANGED

@@ -1,7 +1,6 @@
 import logging
 from typing import TYPE_CHECKING, Optional

-import numpy as np
 from fastapi import APIRouter, File, Header, Request, UploadFile

 from ...data_classes.image_metadata import ImageMetadata
@@ -35,14 +34,15 @@ async def http_detect(

     """
     try:
-
+        # Read file directly to bytes instead of using numpy
+        file_bytes = file.file.read()
     except Exception as exc:
         logging.exception('Error during reading of image %s.', file.filename)
         raise Exception(f'Uploaded file {file.filename} is no image file.') from exc

     try:
         app: 'DetectorNode' = request.app
-        detections = await app.get_detections(raw_image=
+        detections = await app.get_detections(raw_image=file_bytes,
                                               camera_id=camera_id or mac or None,
                                               tags=tags.split(',') if tags else [],
                                               source=source,
learning_loop_node/detector/rest/upload.py CHANGED

@@ -12,7 +12,8 @@ router = APIRouter()
 async def upload_image(request: Request,
                        files: List[UploadFile] = File(...),
                        source: Optional[str] = Query(None, description='Source of the image'),
-                       creation_date: Optional[str] = Query(None, description='Creation date of the image')
+                       creation_date: Optional[str] = Query(None, description='Creation date of the image'),
+                       upload_priority: bool = Query(False, description='Upload the image with priority')):
     """
     Upload an image or multiple images to the learning loop.

@@ -21,9 +22,9 @@ async def upload_image(request: Request,

     Example Usage

-        curl -X POST -F 'files=@test.jpg' "http://localhost:/upload?source=test&creation_date=2024-01-01T00:00:00"
+        curl -X POST -F 'files=@test.jpg' "http://localhost:/upload?source=test&creation_date=2024-01-01T00:00:00&upload_priority=true"
     """
     raw_files = [await file.read() for file in files]
     node: DetectorNode = request.app
-    await node.upload_images(raw_files, source, creation_date)
+    await node.upload_images(images=raw_files, source=source, creation_date=creation_date, upload_priority=upload_priority)
     return 200, "OK"
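Mirroring the curl example above, a hedged client-side sketch using the requests library; the port is a placeholder (like the elided port in the curl line) and requests is assumed to be available in the client environment:

import requests

detector_url = 'http://localhost:8004'  # placeholder: use the port your detector node listens on

with open('test.jpg', 'rb') as f:  # placeholder image path
    response = requests.post(
        f'{detector_url}/upload',
        files=[('files', ('test.jpg', f, 'image/jpeg'))],
        params={
            'source': 'test',
            'creation_date': '2024-01-01T00:00:00',
            'upload_priority': 'true',  # routes the image into the priority queue
        },
        timeout=10,
    )
response.raise_for_status()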
learning_loop_node/helpers/background_tasks.py ADDED

@@ -0,0 +1,78 @@
+# Copy of Nicegui background_tasks.py
+# MIT License
+
+# Copyright (c) 2021 Zauberzeug GmbH
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""inspired from https://quantlane.com/blog/ensure-asyncio-task-exceptions-get-logged/"""
+from __future__ import annotations
+
+import asyncio
+import logging
+from typing import Awaitable, Dict, Set
+
+running_tasks: Set[asyncio.Task] = set()
+lazy_tasks_running: Dict[str, asyncio.Task] = {}
+lazy_tasks_waiting: Dict[str, Awaitable] = {}
+
+
+def create(coroutine: Awaitable, *, name: str = 'unnamed task') -> asyncio.Task:
+    """Wraps a loop.create_task call and ensures there is an exception handler added to the task.
+
+    If the task raises an exception, it is logged and handled by the global exception handlers.
+    Also a reference to the task is kept until it is done, so that the task is not garbage collected mid-execution.
+    See https://docs.python.org/3/library/asyncio-task.html#asyncio.create_task.
+    """
+    loop = asyncio.get_event_loop()
+    coroutine = coroutine if asyncio.iscoroutine(coroutine) else asyncio.wait_for(coroutine, None)
+    task: asyncio.Task = loop.create_task(coroutine, name=name)
+    task.add_done_callback(_handle_task_result)
+    running_tasks.add(task)
+    task.add_done_callback(running_tasks.discard)
+    return task
+
+
+def create_lazy(coroutine: Awaitable, *, name: str) -> None:
+    """Wraps a create call and ensures a second task with the same name is delayed until the first one is done.
+
+    If a third task with the same name is created while the first one is still running, the second one is discarded.
+    """
+    if name in lazy_tasks_running:
+        if name in lazy_tasks_waiting:
+            asyncio.Task(lazy_tasks_waiting[name]).cancel()
+        lazy_tasks_waiting[name] = coroutine
+        return
+
+    def finalize(name: str) -> None:
+        lazy_tasks_running.pop(name)
+        if name in lazy_tasks_waiting:
+            create_lazy(lazy_tasks_waiting.pop(name), name=name)
+    task = create(coroutine, name=name)
+    lazy_tasks_running[name] = task
+    task.add_done_callback(lambda _: finalize(name))
+
+
+def _handle_task_result(task: asyncio.Task) -> None:
+    try:
+        task.result()
+    except asyncio.CancelledError:
+        pass
+    except Exception:
+        logging.exception('Background task %s raised an exception', task.get_name())
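As the header notes, this is a copy of NiceGUI's background_tasks module: create() keeps a reference to the task and logs its exception via a done-callback, while create_lazy() keeps at most one pending submission per name. A usage sketch, assuming the module is importable from learning_loop_node.helpers and using a made-up coroutine:

import asyncio

from learning_loop_node.helpers import background_tasks


async def sync_outbox() -> None:  # hypothetical coroutine standing in for real work
    await asyncio.sleep(0.1)


async def main() -> None:
    # fire-and-forget with exception logging and a kept reference
    background_tasks.create(sync_outbox(), name='sync_outbox')

    # repeated submissions under the same name run one after another;
    # at most one submission is kept waiting, extra ones are discarded
    for _ in range(3):
        background_tasks.create_lazy(sync_outbox(), name='sync_outbox_lazy')

    await asyncio.sleep(0.5)  # give the tasks time to finish in this sketch

asyncio.run(main())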
learning_loop_node/helpers/run.py ADDED

@@ -0,0 +1,21 @@
+import asyncio
+import sys
+from typing import Any, Callable, TypeVar
+
+T = TypeVar('T')
+
+if sys.version_info >= (3, 10):
+    from typing import ParamSpec
+    P = ParamSpec('P')
+
+    async def io_bound(func: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T:
+        """Run a blocking function in a thread pool executor.
+        This is useful for disk I/O operations that would block the event loop."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(None, lambda: func(*args, **kwargs))
+else:
+    async def io_bound(func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
+        """Run a blocking function in a thread pool executor.
+        This is useful for disk I/O operations that would block the event loop."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(None, lambda: func(*args, **kwargs))
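run.io_bound pushes a blocking call onto the default thread pool so the event loop stays responsive; the new Outbox code uses it for disk writes and folder deletion. A minimal sketch with a made-up blocking function and a placeholder path:

import asyncio

from learning_loop_node.helpers import run


def write_report(path: str, content: str) -> int:  # blocking disk I/O
    with open(path, 'w') as f:
        return f.write(content)


async def main() -> None:
    # write_report runs in a thread pool executor while the event loop keeps serving other tasks
    written = await run.io_bound(write_report, '/tmp/outbox_report.txt', 'hello')
    print(f'wrote {written} characters')

asyncio.run(main())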
learning_loop_node/node.py CHANGED
@@ -76,6 +76,8 @@ class Node(FastAPI):
         self.previous_state: Optional[str] = None
         self.repeat_loop_cycle_sec = 5

+        self._client_session: Optional[aiohttp.ClientSession] = None
+
     def log_status_on_change(self, current_state_str: str, full_status: Any):
         if self.previous_state != current_state_str:
             self.previous_state = current_state_str
@@ -127,6 +129,8 @@
         await self.loop_communicator.shutdown()
         if self._sio_client is not None:
             await self._sio_client.disconnect()
+        if self._client_session is not None:
+            await self._client_session.close()
         self.log.info('successfully disconnected from loop.')
         await self.on_shutdown()

@@ -205,12 +209,15 @@
             ssl_context.verify_mode = ssl.CERT_REQUIRED
             connector = TCPConnector(ssl=ssl_context)

+        if self._client_session is not None:
+            await self._client_session.close()
+
         if self.needs_login:
-            self.
-                cookies=cookies, connector=connector))
+            self._client_session = aiohttp.ClientSession(cookies=cookies, connector=connector)
         else:
-            self.
-
+            self._client_session = aiohttp.ClientSession(connector=connector)
+
+        self._sio_client = AsyncClient(request_timeout=20, http_session=self._client_session)

         # pylint: disable=protected-access
         self._sio_client._trigger_event = ensure_socket_response(self._sio_client._trigger_event)
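The node now keeps a single aiohttp.ClientSession, hands it to the socket.io AsyncClient, and closes any previous session before reconnecting as well as on shutdown. A reduced sketch of that reuse-and-close lifecycle, using an illustrative holder class rather than the package's Node:

import asyncio
from typing import Optional

import aiohttp


class SessionHolder:
    """Illustrates the pattern from Node: one session, closed before replacement and on shutdown."""

    def __init__(self) -> None:
        self._client_session: Optional[aiohttp.ClientSession] = None

    async def connect(self) -> aiohttp.ClientSession:
        if self._client_session is not None:
            await self._client_session.close()  # do not leak the previous connector
        self._client_session = aiohttp.ClientSession()
        return self._client_session

    async def shutdown(self) -> None:
        if self._client_session is not None:
            await self._client_session.close()


async def main() -> None:
    holder = SessionHolder()
    await holder.connect()
    await holder.connect()  # the old session is closed before the new one is created
    await holder.shutdown()

asyncio.run(main())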
learning_loop_node/tests/annotator/conftest.py CHANGED

@@ -3,18 +3,23 @@ import logging
 import os
 import shutil

+# ====================================== REDUNDANT FIXTURES IN ALL CONFTESTS ! ======================================
+import sys
+
 import pytest

 from ...globals import GLOBALS
 from ...loop_communication import LoopCommunicator

-# ====================================== REDUNDANT FIXTURES IN ALL CONFTESTS ! ======================================
-

 @pytest.fixture()
 async def setup_test_project():  # pylint: disable=redefined-outer-name
     loop_communicator = LoopCommunicator()
-
+    try:
+        await loop_communicator.delete("/zauberzeug/projects/pytest_nodelib_annotator?keep_images=true", timeout=10)
+    except Exception:
+        logging.warning("Failed to delete project pytest_nodelib_annotator")
+        sys.exit(1)
     await asyncio.sleep(1)
     project_conf = {
         'project_name': 'pytest_nodelib_annotator', 'inbox': 0, 'annotate': 0, 'review': 0, 'complete': 3, 'image_style': 'beautiful',
@@ -22,7 +27,7 @@ async def setup_test_project():  # pylint: disable=redefined-outer-name
         'trainings': 1, 'box_detections': 3, 'box_annotations': 0}
     assert (await loop_communicator.post("/zauberzeug/projects/generator", json=project_conf)).status_code == 200
     yield
-    await loop_communicator.delete("/zauberzeug/projects/pytest_nodelib_annotator?keep_images=true")
+    await loop_communicator.delete("/zauberzeug/projects/pytest_nodelib_annotator?keep_images=true", timeout=10)
     await loop_communicator.shutdown()

learning_loop_node/tests/annotator/test_annotator_node.py CHANGED

@@ -7,7 +7,14 @@ from fastapi.encoders import jsonable_encoder

 from ...annotation.annotator_logic import AnnotatorLogic
 from ...annotation.annotator_node import AnnotatorNode
-from ...data_classes import
+from ...data_classes import (
+    AnnotationData,
+    Category,
+    Context,
+    Point,
+    ToolOutput,
+    UserInput,
+)
 from ...enums import AnnotationEventType, CategoryType


@@ -37,7 +44,8 @@ def default_user_input() -> UserInput:


 @pytest.mark.asyncio
-
+@pytest.mark.usefixtures('setup_test_project')
+async def test_image_download():
     image_folder = '/tmp/learning_loop_lib_data/zauberzeug/pytest_nodelib_annotator/images'

     assert os.path.exists(image_folder) is False or len(os.listdir(image_folder)) == 0
learning_loop_node/tests/detector/inbox_filter/test_unexpected_observations_count.py CHANGED

@@ -24,10 +24,11 @@ l_conf_point_det = PointDetection(category_name='point', x=100, y=100,
                           ['uncertain', 'unexpected_observations_count']),
                          (ImageMetadata(box_detections=[h_conf_box_det], point_detections=[l_conf_point_det]),
                           ['uncertain'])])
-
+@pytest.mark.asyncio
+async def test_unexpected_observations_count(detections: ImageMetadata, reason: List[str]):
     os.environ['LOOP_ORGANIZATION'] = 'zauberzeug'
     os.environ['LOOP_PROJECT'] = 'demo'
     outbox = Outbox()

-
-    assert
+    relevance_filter = RelevanceFilter(outbox)
+    assert await relevance_filter.may_upload_detections(detections, raw_image=b'', cam_id='0:0:0:0', tags=[]) == reason
learning_loop_node/tests/detector/test_client_communication.py CHANGED

@@ -84,7 +84,7 @@ async def test_sio_upload(test_detector_node: DetectorNode, sio_client):
     with open(test_image_path, 'rb') as f:
         image_bytes = f.read()
     result = await sio_client.call('upload', {'image': image_bytes})
-    assert result
+    assert result.get('status') == 'OK'
     assert len(get_outbox_files(test_detector_node.outbox)) == 2, 'There should be one image and one .json file.'


@@ -175,25 +175,3 @@ async def test_rest_outbox_mode(test_detector_node: DetectorNode):
     check_switch_to_mode('stopped')
     check_switch_to_mode('continuous_upload')
     check_switch_to_mode('stopped')
-
-
-async def test_api_responsive_during_large_upload(test_detector_node: DetectorNode):
-    assert len(get_outbox_files(test_detector_node.outbox)) == 0
-
-    with open(test_image_path, 'rb') as f:
-        image_bytes = f.read()
-
-    for _ in range(200):
-        test_detector_node.outbox.save(image_bytes)
-
-    outbox_size_early = len(get_outbox_files(test_detector_node.outbox))
-    await asyncio.sleep(5)  # NOTE: we wait 5 seconds because the continuous upload is running every 5 seconds
-
-    # check if api is still responsive
-    response = requests.get(f'http://localhost:{GLOBALS.detector_port}/outbox_mode', timeout=2)
-    assert response.status_code == 200, response.content
-
-    await asyncio.sleep(5)
-    outbox_size_late = len(get_outbox_files(test_detector_node.outbox))
-    assert outbox_size_late > 0, 'The outbox should not be fully cleared, maybe the node was too fast.'
-    assert outbox_size_early > outbox_size_late, 'The outbox should have been partially emptied.'
learning_loop_node/tests/detector/test_outbox.py CHANGED

@@ -6,8 +6,6 @@ import shutil
 import pytest
 from PIL import Image

-from ...data_classes import ImageMetadata
-from ...detector.detector_node import DetectorNode
 from ...detector.outbox import Outbox
 from ...globals import GLOBALS

@@ -26,31 +24,24 @@ async def test_outbox():
     shutil.rmtree(test_outbox.path, ignore_errors=True)


-@pytest.mark.asyncio
-async def test_files_are_automatically_uploaded_by_node(test_detector_node: DetectorNode):
-    test_detector_node.outbox.save(get_test_image_binary(), ImageMetadata())
-    assert await wait_for_outbox_count(test_detector_node.outbox, 1)
-    assert await wait_for_outbox_count(test_detector_node.outbox, 0)
-
-
 @pytest.mark.asyncio
 async def test_set_outbox_mode(test_outbox: Outbox):
     await test_outbox.set_mode('stopped')
-    test_outbox.save(get_test_image_binary())
+    await test_outbox.save(get_test_image_binary())
     assert await wait_for_outbox_count(test_outbox, 1)
     await asyncio.sleep(6)
     assert await wait_for_outbox_count(test_outbox, 1), 'File was cleared even though outbox should be stopped'

     await test_outbox.set_mode('continuous_upload')
-    assert await wait_for_outbox_count(test_outbox, 0), 'File was not cleared even though outbox should be in continuous_upload'
+    assert await wait_for_outbox_count(test_outbox, 0, timeout=15), 'File was not cleared even though outbox should be in continuous_upload'
     assert test_outbox.upload_counter == 1


 @pytest.mark.asyncio
 async def test_outbox_upload_is_successful(test_outbox: Outbox):
-    test_outbox.save(get_test_image_binary())
+    await test_outbox.save(get_test_image_binary())
     await asyncio.sleep(1)
-    test_outbox.save(get_test_image_binary())
+    await test_outbox.save(get_test_image_binary())
     assert await wait_for_outbox_count(test_outbox, 2)
     await test_outbox.upload()
     assert await wait_for_outbox_count(test_outbox, 0)
@@ -60,8 +51,8 @@ async def test_outbox_upload_is_successful(test_outbox: Outbox):
 @pytest.mark.asyncio
 async def test_invalid_jpg_is_not_saved(test_outbox: Outbox):
     invalid_bytes = b'invalid jpg'
-    test_outbox.save(invalid_bytes)
-    assert len(test_outbox.
+    await test_outbox.save(invalid_bytes)
+    assert len(test_outbox.get_upload_folders()) == 0


 # ------------------------------ Helper functions --------------------------------------
@@ -90,7 +81,7 @@ def get_test_image_binary():

 async def wait_for_outbox_count(outbox: Outbox, count: int, timeout: int = 10) -> bool:
     for _ in range(timeout):
-        if len(outbox.
+        if len(outbox.get_upload_folders()) == count:
             return True
         await asyncio.sleep(1)
     return False