boris-behav-obs 9.7.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- boris/__init__.py +26 -0
- boris/__main__.py +25 -0
- boris/about.py +143 -0
- boris/add_modifier.py +635 -0
- boris/add_modifier_ui.py +303 -0
- boris/advanced_event_filtering.py +455 -0
- boris/analysis_plugins/__init__.py +0 -0
- boris/analysis_plugins/_latency.py +59 -0
- boris/analysis_plugins/irr_cohen_kappa.py +109 -0
- boris/analysis_plugins/irr_cohen_kappa_with_modifiers.py +112 -0
- boris/analysis_plugins/irr_weighted_cohen_kappa.py +157 -0
- boris/analysis_plugins/irr_weighted_cohen_kappa_with_modifiers.py +162 -0
- boris/analysis_plugins/list_of_dataframe_columns.py +22 -0
- boris/analysis_plugins/number_of_occurences.py +22 -0
- boris/analysis_plugins/number_of_occurences_by_independent_variable.py +54 -0
- boris/analysis_plugins/time_budget.py +61 -0
- boris/behav_coding_map_creator.py +1110 -0
- boris/behavior_binary_table.py +305 -0
- boris/behaviors_coding_map.py +239 -0
- boris/boris_cli.py +340 -0
- boris/cmd_arguments.py +49 -0
- boris/coding_pad.py +280 -0
- boris/config.py +785 -0
- boris/config_file.py +356 -0
- boris/connections.py +409 -0
- boris/converters.py +333 -0
- boris/converters_ui.py +225 -0
- boris/cooccurence.py +250 -0
- boris/core.py +5901 -0
- boris/core_qrc.py +15958 -0
- boris/core_ui.py +1107 -0
- boris/db_functions.py +324 -0
- boris/dev.py +134 -0
- boris/dialog.py +1108 -0
- boris/duration_widget.py +238 -0
- boris/edit_event.py +245 -0
- boris/edit_event_ui.py +233 -0
- boris/event_operations.py +1040 -0
- boris/events_cursor.py +61 -0
- boris/events_snapshots.py +596 -0
- boris/exclusion_matrix.py +141 -0
- boris/export_events.py +1006 -0
- boris/export_observation.py +1203 -0
- boris/external_processes.py +332 -0
- boris/geometric_measurement.py +941 -0
- boris/gui_utilities.py +135 -0
- boris/image_overlay.py +72 -0
- boris/import_observations.py +242 -0
- boris/ipc_mpv.py +325 -0
- boris/irr.py +634 -0
- boris/latency.py +244 -0
- boris/measurement_widget.py +161 -0
- boris/media_file.py +115 -0
- boris/menu_options.py +213 -0
- boris/modifier_coding_map_creator.py +1013 -0
- boris/modifiers_coding_map.py +157 -0
- boris/mpv.py +2016 -0
- boris/mpv2.py +2193 -0
- boris/observation.py +1453 -0
- boris/observation_operations.py +2538 -0
- boris/observation_ui.py +679 -0
- boris/observations_list.py +337 -0
- boris/otx_parser.py +442 -0
- boris/param_panel.py +201 -0
- boris/param_panel_ui.py +305 -0
- boris/player_dock_widget.py +198 -0
- boris/plot_data_module.py +536 -0
- boris/plot_events.py +634 -0
- boris/plot_events_rt.py +237 -0
- boris/plot_spectrogram_rt.py +316 -0
- boris/plot_waveform_rt.py +230 -0
- boris/plugins.py +431 -0
- boris/portion/__init__.py +31 -0
- boris/portion/const.py +95 -0
- boris/portion/dict.py +365 -0
- boris/portion/func.py +52 -0
- boris/portion/interval.py +581 -0
- boris/portion/io.py +181 -0
- boris/preferences.py +510 -0
- boris/preferences_ui.py +770 -0
- boris/project.py +2007 -0
- boris/project_functions.py +2041 -0
- boris/project_import_export.py +1096 -0
- boris/project_ui.py +794 -0
- boris/qrc_boris.py +10389 -0
- boris/qrc_boris5.py +2579 -0
- boris/select_modifiers.py +312 -0
- boris/select_observations.py +210 -0
- boris/select_subj_behav.py +286 -0
- boris/state_events.py +197 -0
- boris/subjects_pad.py +106 -0
- boris/synthetic_time_budget.py +290 -0
- boris/time_budget_functions.py +1136 -0
- boris/time_budget_widget.py +1039 -0
- boris/transitions.py +365 -0
- boris/utilities.py +1810 -0
- boris/version.py +24 -0
- boris/video_equalizer.py +159 -0
- boris/video_equalizer_ui.py +248 -0
- boris/video_operations.py +310 -0
- boris/view_df.py +104 -0
- boris/view_df_ui.py +75 -0
- boris/write_event.py +538 -0
- boris_behav_obs-9.7.7.dist-info/METADATA +139 -0
- boris_behav_obs-9.7.7.dist-info/RECORD +109 -0
- boris_behav_obs-9.7.7.dist-info/WHEEL +5 -0
- boris_behav_obs-9.7.7.dist-info/entry_points.txt +2 -0
- boris_behav_obs-9.7.7.dist-info/licenses/LICENSE.TXT +674 -0
- boris_behav_obs-9.7.7.dist-info/top_level.txt +1 -0
boris/utilities.py
ADDED
|
@@ -0,0 +1,1810 @@
|
|
|
1
|
+
"""
|
|
2
|
+
BORIS
|
|
3
|
+
Behavioral Observation Research Interactive Software
|
|
4
|
+
Copyright 2012-2025 Olivier Friard
|
|
5
|
+
|
|
6
|
+
This program is free software; you can redistribute it and/or modify
|
|
7
|
+
it under the terms of the GNU General Public License as published by
|
|
8
|
+
the Free Software Foundation; either version 2 of the License, or
|
|
9
|
+
(at your option) any later version.
|
|
10
|
+
|
|
11
|
+
This program is distributed in the hope that it will be useful,
|
|
12
|
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
13
|
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
14
|
+
GNU General Public License for more details.
|
|
15
|
+
|
|
16
|
+
You should have received a copy of the GNU General Public License
|
|
17
|
+
along with this program; if not, write to the Free Software
|
|
18
|
+
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
|
|
19
|
+
MA 02110-1301, USA.
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
from decimal import Decimal as dec
|
|
23
|
+
from decimal import getcontext, ROUND_DOWN
|
|
24
|
+
from hachoir.metadata import extractMetadata
|
|
25
|
+
from hachoir.parser import createParser
|
|
26
|
+
from shutil import copyfile, which
|
|
27
|
+
from typing import Union, Tuple
|
|
28
|
+
import csv
|
|
29
|
+
import datetime
|
|
30
|
+
import datetime as dt
|
|
31
|
+
import exifread
|
|
32
|
+
import json
|
|
33
|
+
import logging
|
|
34
|
+
import math
|
|
35
|
+
import numpy as np
|
|
36
|
+
import os
|
|
37
|
+
from pathlib import Path
|
|
38
|
+
from PIL.ImageQt import Image
|
|
39
|
+
import platform
|
|
40
|
+
import re
|
|
41
|
+
import shutil
|
|
42
|
+
import subprocess
|
|
43
|
+
import sys
|
|
44
|
+
import urllib.parse
|
|
45
|
+
import urllib.request
|
|
46
|
+
import wave
|
|
47
|
+
import socket
|
|
48
|
+
|
|
49
|
+
from PySide6 import __version__ as pyside6_version
|
|
50
|
+
from PySide6.QtGui import QPixmap, QImage
|
|
51
|
+
from PySide6.QtCore import qVersion
|
|
52
|
+
|
|
53
|
+
from . import config as cfg
|
|
54
|
+
from . import version
|
|
55
|
+
|
|
56
|
+
logger = logging.getLogger(__name__)
|
|
57
|
+
|
|
58
|
+
# ---------------------------------------------------------------------------
# MPV library bootstrap (runs at import time, with side effects).
# On Windows and Linux, and only when BORIS is NOT started in MPV IPC mode
# ("-i" / "--ipc" flags), try to load the bundled python-mpv wrapper.
# On failure: Windows downloads libmpv-2.dll from the BORIS GitHub releases;
# Linux shows install instructions via the first available dialog tool and
# exits.  Exit code 5 signals "MPV library unavailable".
# ---------------------------------------------------------------------------
if (sys.platform.startswith("win") or sys.platform.startswith("linux")) and ("-i" not in sys.argv) and ("--ipc" not in sys.argv):
    try:
        from . import mpv2 as mpv
    except Exception:
        logger.warning("MPV library not found")

        if sys.platform.startswith("win"):
            import ctypes

            logger.info("The MPV library was not found!\nIt will be downloaded from the BORIS GitHub repository")
            # ctypes.windll.user32.MessageBoxW(0, "The MPV library was not found!\nIt will be downloaded.", "BORIS", 0)

            # test if following function works on windows
            # MessageBoxTimeoutW is an undocumented user32 entry point that
            # auto-closes the message box after the timeout (ms)
            MessageBoxTimeoutW = ctypes.windll.user32.MessageBoxTimeoutW
            MessageBoxTimeoutW.argtypes = [ctypes.c_void_p, ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint, ctypes.c_uint, ctypes.c_uint]
            ctypes.windll.user32.MessageBoxTimeoutW(
                None, "The MPV library was not found.\nIt will be downloaded from the BORIS GitHub repository.", "MPV library", 0, 0, 10000
            )  # time out

            # download libmpv2.dll from https://github.com/boris-behav-obs/boris-behav-obs.github.io/releases/download/files/

            url: str = "https://github.com/boris-behav-obs/boris-behav-obs.github.io/releases/download/files/"

            external_files_dir = ""
            # search where to download libmpv-2.dll

            # the DLL goes next to the package, in a "misc" sub-directory
            external_files_dir = Path(__file__).parent / "misc"
            if not external_files_dir.is_dir():
                logger.info(f"Creating {external_files_dir} directory")
                external_files_dir.mkdir(parents=True, exist_ok=True)

            logger.info(f"MPV library directory: {external_files_dir}")

            local_filename = external_files_dir / "libmpv-2.dll"
            logger.info("Downloading libmpv-2.dll...")
            try:
                urllib.request.urlretrieve(url + "libmpv-2.dll", local_filename)
                logger.info(f"File downloaded as {local_filename}")
            except Exception:
                logger.critical("The MPV library can not be downloaded! Check your connection.")
                ctypes.windll.user32.MessageBoxW(0, "The MPV library can not be downloaded!\nCheck your connection.", "BORIS", 0)
                sys.exit(5)
            # reload package (mpv2 searches the DLL in the misc directory)
            try:
                from . import mpv2 as mpv
            except Exception:
                logger.critical("MPV library not found after dowloading")
                sys.exit(5)

        elif sys.platform.startswith("linux"):
            # no automatic download on Linux: tell the user which package to
            # install, using whatever GUI dialog tool is available
            text = (
                "The MPV library was not found!\nInstall it\n\n"
                "With Debian/Ubuntu/Mint:\nsudo apt install libmpv2\n\n"
                "With Fedora:\nsudo dnf install mpv-libs\n\n"
                "With OpenSUSE:\nsudo zypper install mpv\n\n"
                "Arch Linux / Manjaro:\nsudo pacman -S mpv\n\n"
            )
            if shutil.which("zenity") is not None:
                subprocess.run(["zenity", "--error", f"--text={text}"])
            elif shutil.which("kdialog"):
                subprocess.run(["kdialog", "--msgbox", text])
            elif shutil.which("gxmessage"):
                subprocess.run(["gxmessage", text])
            elif shutil.which("xmessage"):
                subprocess.run(["xmessage", text])

            sys.exit(5)
        else:
            # unreachable in practice (outer condition restricts to win/linux)
            sys.exit(5)
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def test_mpv_ipc(socket_path: str = cfg.MPV_SOCKET) -> bool:
    """
    Test if the MPV IPC socket is available.

    Args:
        socket_path (str): path of the unix-domain socket (default: cfg.MPV_SOCKET)

    Returns:
        bool: True if the socket exists and accepts a connection, False otherwise
    """
    if not os.path.exists(socket_path):
        return False

    try:
        client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        try:
            client.connect(socket_path)
        finally:
            # FIX: close the socket even when connect() fails (the original
            # leaked the file descriptor on connection error)
            client.close()
        return True
    except Exception:
        return False
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def extract_exif_DateTimeOriginal(file_path: str) -> int:
    """
    extract the EXIF DateTimeOriginal tag
    return epoch time
    if the tag is not available return -1

    Args:
        file_path (str): path of the media file

    Returns:
        int: timestamp (epoch, seconds); -1 if no date/time could be extracted

    """
    try:
        with open(file_path, "rb") as f_in:
            # stop parsing as soon as DateTimeOriginal is found (faster)
            tags = exifread.process_file(f_in, details=False, stop_tag="EXIF DateTimeOriginal")
            if "EXIF DateTimeOriginal" in tags:
                # assumes tag .values is a "YYYY:MM:DD HH:MM:SS"-shaped string;
                # the slices rebuild it as "YYYY-MM-DD HH:MM:SS" for strptime
                date_time_original = (
                    f"{tags['EXIF DateTimeOriginal'].values[:4]}-"
                    f"{tags['EXIF DateTimeOriginal'].values[5:7]}-"
                    f"{tags['EXIF DateTimeOriginal'].values[8:10]} "
                    f"{tags['EXIF DateTimeOriginal'].values.split(' ')[-1]}"
                )
                return int(datetime.datetime.strptime(date_time_original, "%Y-%m-%d %H:%M:%S").timestamp())
            else:
                # no EXIF tag: fall back to parsing the date from the file name stem
                try:
                    # read from file name (YYYY-MM-DD_HHMMSS)
                    return int(datetime.datetime.strptime(Path(file_path).stem, "%Y-%m-%d_%H%M%S").timestamp())
                except Exception:
                    # read from file name (YYYY-MM-DD_HH:MM:SS)
                    return int(datetime.datetime.strptime(Path(file_path).stem, "%Y-%m-%d_%H:%M:%S").timestamp())

    except Exception:
        # unreadable file, malformed tag or unparsable file name
        return -1
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def extract_video_creation_date(file_path: str) -> int | None:
    """
    Return the timestamp (epoch, seconds) of the media creation date,
    extracted with Hachoir.

    Args:
        file_path (str): path of the media file

    Returns:
        int | None: creation timestamp, or None when the file does not exist,
        the metadata cannot be extracted, or the creation date is the
        QuickTime/MP4 "zero" date (1904-01-01, meaning "not recorded")
    """

    logger.debug(f"extract_video_creation_date for {file_path}")

    if not Path(file_path).is_file():
        logger.debug(f"{file_path} not found")
        return None
    try:
        parser = createParser(file_path)
        metadata = extractMetadata(parser)
    except Exception:
        return None

    # FIX: extractMetadata() returns None (without raising) for unrecognized
    # formats; the original then crashed with AttributeError on metadata.has()
    if metadata is None:
        return None

    if metadata.has("creation_date"):
        # 1904-01-01 is the MP4/QuickTime epoch: treated as "no date recorded"
        if metadata.get("creation_date") == datetime.datetime(1904, 1, 1, 0, 0):
            return None
        return metadata.get("creation_date").timestamp()
    else:
        return None
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def extract_date_time_from_file_name(file_path: str) -> int | None:
    """
    Extract a date/time of the form YYYY-MM-DD_HHMMSS or YYYY-MM-DD_HH:MM:SS
    from a file name and return it as an epoch timestamp (seconds).

    Args:
        file_path (str): path (or name) of the file to scan

    Returns:
        int | None: epoch timestamp of the first match, None if no pattern matches
    """

    # (regex, strptime format) pairs, tried in order
    candidates = (
        (r"\d{4}-\d{2}-\d{2}_\d{6}", "%Y-%m-%d_%H%M%S"),
        (r"\d{4}-\d{2}-\d{2}_\d{2}:\d{2}:\d{2}", "%Y-%m-%d_%H:%M:%S"),
    )
    for regex, strptime_format in candidates:
        found = re.findall(regex, file_path)
        if not found:
            continue
        timestamp = int(datetime.datetime.strptime(found[0], strptime_format).timestamp())
        logger.debug(f"extract_date_time_from_file_name timestamp from {file_path}: {timestamp}")
        return timestamp

    return None
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
def mpv_lib_version() -> Tuple[str, str, str]:
    """
    Version of MPV library

    Returns:
        Tuple[str, str, str]: client API version ("major.minor"), path of the
        loaded library file, MPV version string.
        In IPC mode (or on macOS) returns ("MPV IPC mode", "", "").
    """
    # in IPC mode the library is not loaded in-process, so no version is available
    if ("-i" in sys.argv) or ("--ipc" in sys.argv) or sys.platform.startswith("darwin"):
        return "MPV IPC mode", "", ""

    mpv_lib_file = None
    if sys.platform.startswith("linux"):
        mpv_lib_file = mpv.sofile  # path of the .so loaded by the mpv wrapper
    if sys.platform.startswith("win"):
        mpv_lib_file = mpv.dll  # path of the DLL loaded by the mpv wrapper

    return (".".join([str(x) for x in mpv._mpv_client_api_version()]), mpv_lib_file, mpv.MPV_VERSION)
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def python_mpv_script_version() -> str:
    """
    Return the version of the python-mpv wrapper script,
    or "Not found" when it is not available.
    """
    # broad except on purpose: mpv may not even be imported (IPC mode / macOS)
    try:
        script_version = mpv.__version__
    except Exception:
        script_version = "Not found"
    return script_version
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def error_info(exc_info: tuple) -> tuple:
    """
    Return details about an error.
    Usage: error_info(sys.exc_info())

    Args:
        exc_info (tuple): value returned by sys.exc_info()

    Returns:
        tuple: (error description, file name where it occurred, line number)
    """

    error_type, error_value, traceback_obj = exc_info
    # keep only the file name, not the whole path
    file_name = os.path.basename(traceback_obj.tb_frame.f_code.co_filename)
    return (f"{error_type}: {error_value}", file_name, traceback_obj.tb_lineno)
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def pil2pixmap(im: Image) -> QPixmap:
    """
    convert PIL image to pixmap
    see https://stackoverflow.com/questions/34697559/pil-image-to-qpixmap-conversion-issue

    Args:
        im (Image): PIL image (modes "RGB", "RGBA" and "L" are handled)

    Returns:
        QPixmap: pixmap built from the image data
    """

    # swap R and B channels before handing the bytes to QImage — presumably
    # because Format_ARGB32 expects BGRA byte order on little-endian platforms
    # (see the linked Stack Overflow post); confirm if ported to big-endian
    if im.mode == "RGB":
        r, g, b = im.split()
        im = Image.merge("RGB", (b, g, r))
    elif im.mode == "RGBA":
        r, g, b, a = im.split()
        im = Image.merge("RGBA", (b, g, r, a))
    elif im.mode == "L":
        # grayscale: all channels equal after conversion, so no swap needed
        im = im.convert("RGBA")

    im2 = im.convert("RGBA")
    data = im2.tobytes("raw", "RGBA")
    qim = QImage(data, im.size[0], im.size[1], QImage.Format_ARGB32)
    pixmap = QPixmap.fromImage(qim)
    return pixmap
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
def replace_leading_trailing_chars(s: str, old_char: str, new_char: str) -> str:
    """
    Replace leading and trailing occurrences of old_char with new_char,
    leaving the inner part of the string untouched.

    Args:
        s: input string
        old_char: character to be replaced
        new_char: replacement character

    Returns:
        str: string with leading/trailing characters replaced
    """

    # locate the core of the string (stripped of old_char at both ends),
    # then rewrite only the prefix and suffix around it
    core = s.strip(old_char)
    prefix, middle, suffix = s.partition(core)
    return prefix.replace(old_char, new_char) + middle + suffix.replace(old_char, new_char)
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
def return_file_header(file_name: str, row_number: int = 5) -> list:
    """
    Return the first rows of a file.

    Args:
        file_name (str): path of file
        row_number (int): number of rows to return

    Returns:
        list: first row_number row(s) of file_name ("" entries past EOF),
        [] when the file cannot be read
    """
    try:
        with open(file_name) as f_in:
            # readline() returns "" past EOF, matching the fixed-length output
            return [f_in.readline() for _ in range(row_number)]
    except Exception:
        return []
|
|
338
|
+
|
|
339
|
+
|
|
340
|
+
def return_file_header_footer(file_name: str, file_row_number: int = 0, row_number: int = 5) -> Tuple[list, list]:
    """
    Return file header and footer (rows stripped of surrounding whitespace).

    Args:
        file_name (str): path of file
        file_row_number (int): total rows number of file
        row_number (int): number of rows to return for header and for footer

    Returns:
        Tuple[list, list]: header rows and footer rows; ([], []) on error.
        The footer is collected only when the file is long enough that
        header and footer do not overlap (file_row_number > 2 * row_number).
    """
    header: list = []
    footer: list = []
    collect_footer = file_row_number > row_number * 2
    try:
        with open(file_name, "rt") as f_in:
            for row_idx, row in enumerate(f_in):
                if row_idx < row_number:
                    header.append(row.strip())
                if collect_footer and row_idx >= file_row_number - row_number:
                    footer.append(row.strip())
    except Exception:
        return [], []
    return header, footer
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
def convertTime(time_format: str, sec: Union[float, dec]) -> Union[str, None]:
    """
    Format a time value according to the requested time format.

    Args:
        time_format (str): cfg.S (seconds) or cfg.HHMMSS
        sec (float | Decimal): time in seconds

    Returns:
        str | None: formatted time (cfg.NA for a Decimal NaN),
        None when time_format is not recognized
    """

    # Decimal NaN means "no time available"
    if isinstance(sec, dec) and sec.is_nan():
        return cfg.NA

    # dispatch on the requested format
    formatters = {cfg.S: lambda t: f"{t:.3f}", cfg.HHMMSS: seconds2time}
    formatter = formatters.get(time_format)
    return formatter(sec) if formatter is not None else None
|
|
390
|
+
|
|
391
|
+
|
|
392
|
+
def smart_time_format(sec: Union[float, dec], time_format: str = cfg.S, cutoff: dec = cfg.SMART_TIME_CUTOFF_DEFAULT) -> str:
    """
    "Smart" time formatting: seconds with 3 decimals when sec <= cutoff,
    HH:MM:SS.ZZZ otherwise.

    Args:
        sec (float | Decimal): time in seconds
        time_format (str): fallback format used when cutoff is 0
        cutoff (Decimal): threshold in seconds; 0 defers to time_format
    """
    # cutoff = 0 follows the time format selected by the user
    if cutoff == 0:
        return convertTime(time_format, sec)
    return f"{sec:.3f}" if sec <= cutoff else seconds2time(sec)
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
def convert_time_to_decimal(pj: dict) -> dict:
    """
    Convert all times of a BORIS project from float to Decimal, in place.

    Args:
        pj (dict): BORIS project

    Returns:
        dict: the same BORIS project, with Decimal times
    """
    for observation in pj[cfg.OBSERVATIONS].values():
        if cfg.TIME_OFFSET in observation:
            # missing offset is normalized to 0.000
            offset = observation[cfg.TIME_OFFSET]
            observation[cfg.TIME_OFFSET] = dec("0.000") if offset is None else dec(str(offset))
        for event in observation[cfg.EVENTS]:
            # event times are quantized to the millisecond
            event[cfg.EVENT_TIME_FIELD_IDX] = dec(event[cfg.EVENT_TIME_FIELD_IDX]).quantize(dec(".001"))

    return pj
|
|
428
|
+
|
|
429
|
+
|
|
430
|
+
def count_media_file(media_files: dict) -> int:
    """
    Count the total number of media files of an observation
    (summed over all players).
    """
    return sum(len(files) for files in media_files.values())
|
|
435
|
+
|
|
436
|
+
|
|
437
|
+
def txt2np_array(
    file_name: str, columns_str: str, substract_first_value: str, converters=None, column_converter=None
) -> Tuple[bool, str, np.array]:
    """
    read a txt file (tsv or csv) and return a np array with columns cited in columns_str

    Args:
        file_name (str): path of the file to load in numpy array
        columns_str (str): 1-based indexes of columns to be loaded, comma separated.
            First column must be the timestamp. Example: "4,5"
        substract_first_value (str): "True" to shift timestamps so the first one is 0
        converters (dict): dictionary containing converters (name -> {"code": ...})
        column_converter (dict): dictionary key: column index (1-based), value: converter name

    Returns:
        bool: True if data successfully loaded, False in case of error
        str: error message. Empty if success
        numpy array: data. Empty if data failed to be loaded
    """
    import types

    if converters is None:
        converters = {}
    if column_converter is None:
        column_converter = {}

    # check columns (user indexes are 1-based, numpy wants 0-based)
    try:
        columns = [int(x) - 1 for x in columns_str.split(",")]
    except Exception:
        return False, f"Problem with columns {columns_str}", np.array([])

    # build numpy converter callables from the user-defined converter code
    np_converters: dict = {}
    for column_idx in column_converter:
        if column_converter[column_idx] not in converters:
            return False, f"converter {column_converter[column_idx]} not found", np.array([])

        conv_name = column_converter[column_idx]

        # wrap the user code in a function taking INPUT and returning OUTPUT.
        # SECURITY NOTE: the converter code comes from the BORIS project file
        # and is executed with exec() — it must be trusted content.
        function = f"""def {conv_name}(INPUT):\n"""
        function += """    INPUT = INPUT.decode("utf-8") if isinstance(INPUT, bytes) else INPUT\n\n"""
        for line in converters[conv_name]["code"].split("\n"):
            function += f"    {line}\n"
        function += """    return OUTPUT"""

        # compile the generated function in a throw-away module namespace
        mod = types.ModuleType("converter_module")
        try:
            exec(function, mod.__dict__)
        except Exception:
            return False, f"error in converter: {sys.exc_info()[1]}", np.array([])

        np_converters[column_idx - 1] = getattr(mod, conv_name)

    # sniff the txt file dialect and count the header rows
    try:
        with open(file_name) as csvfile:
            buff = csvfile.read(4096)
            snif = csv.Sniffer()
            dialect = snif.sniff(buff)

        header_rows_nb = 0
        csv.register_dialect("dialect", dialect)
        with open(file_name, "r") as f:
            reader = csv.reader(f, dialect="dialect")
            for row in reader:
                # a header row contains no numeric value at all
                if sum(isinstance(intfloatstr(x), str) for x in row) == len(row):
                    header_rows_nb += 1
                else:
                    # FIX: stop at the first data row — the original counted
                    # text-only rows anywhere in the file, which made skiprows
                    # wrong when such a row appeared after the data started
                    break

    except Exception:
        return False, f"{sys.exc_info()[1]}", np.array([])

    try:
        data = np.loadtxt(file_name, delimiter=dialect.delimiter, usecols=columns, skiprows=header_rows_nb, converters=np_converters)
    except Exception:
        return False, f"{sys.exc_info()[1]}", np.array([])

    # check if first value must be substracted (timestamps relative to start)
    if substract_first_value == "True":
        data[:, 0] -= data[:, 0][0]

    return True, "", data
|
|
536
|
+
|
|
537
|
+
|
|
538
|
+
def versiontuple(version_str: str) -> tuple:
    """
    Convert a dotted version string into a tuple of zero-padded strings,
    so that versions compare correctly with tuple comparison.

    Args:
        version_str (str): version, e.g. "8.21.4"

    Returns:
        tuple: version components, each zero-filled to 8 characters
    """
    return tuple(component.zfill(8) for component in version_str.split("."))
|
|
552
|
+
|
|
553
|
+
|
|
554
|
+
def behavior_user_color(ethogram: dict, behavior_code: str) -> Union[str, None]:
    """
    Return the user-defined color of a behavior.

    Returns None when the behavior is not in the ethogram, has no color key,
    or its color is an empty string.
    """
    for behavior in ethogram.values():
        if behavior[cfg.BEHAVIOR_CODE] != behavior_code:
            continue
        color = behavior.get(cfg.COLOR, None)
        # an empty string means "no color set"
        return None if color == "" else color

    return None
|
|
566
|
+
|
|
567
|
+
|
|
568
|
+
def behav_category_user_color(behavioral_categories: dict, name: str) -> Union[str, None]:
    """
    Return the color of the behavioral category called `name`,
    or None when the category is not found or has no color defined.
    """
    return next(
        (category.get(cfg.COLOR, None) for category in behavioral_categories.values() if category["name"] == name),
        None,
    )
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
def state_behavior_codes(ethogram: dict) -> list:
    """
    Return the behavior codes defined as STATE events.

    Args:
        ethogram (dict): ethogram dictionary

    Returns:
        list: behavior codes whose type is one of cfg.STATE_EVENT_TYPES
    """
    codes: list = []
    for behavior in ethogram.values():
        if behavior[cfg.TYPE] in cfg.STATE_EVENT_TYPES:
            codes.append(behavior[cfg.BEHAVIOR_CODE])
    return codes
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
def point_behavior_codes(ethogram: dict) -> list:
    """
    Return the behavior codes defined as POINT events.

    Args:
        ethogram (dict): ethogram dictionary

    Returns:
        list: behavior codes whose type is a point event (with or without coding map)
    """
    point_types = (cfg.POINT_EVENT, cfg.POINT_EVENT_WITH_CODING_MAP)
    return [behavior[cfg.BEHAVIOR_CODE] for behavior in ethogram.values() if behavior[cfg.TYPE] in point_types]
|
|
605
|
+
|
|
606
|
+
|
|
607
|
+
def group_events(pj: dict, obs_id: str, include_modifiers: bool = False) -> dict:
    """
    group events by subject, behavior, modifier (if required)

    result is a dict like:

    {(subject, behavior, ""): list of tuple (start: Decimal, end: Decimal)}

    or with modifiers:

    {(subject, behavior, modifier): list of tuple (start: Decimal, end: Decimal)}

    in case of point events start=end

    Args:
        pj (dict): BORIS project
        obs_id (str): observation id
        include_modifiers (bool): if True, group also by modifier

    Returns:
        dict: intervals keyed by (subject, behavior, modifier);
        {"error": ""} if anything went wrong
    """

    try:
        state_events_list = state_behavior_codes(pj[cfg.ETHOGRAM])
        point_events_list = point_behavior_codes(pj[cfg.ETHOGRAM])
        # mem_behav holds the start time of a currently "open" state
        # (0 once the state has been closed)
        mem_behav = {}
        intervals_behav = {}

        for event in pj[cfg.OBSERVATIONS][obs_id][cfg.EVENTS]:
            time_ = event[cfg.EVENT_TIME_FIELD_IDX]
            subject = event[cfg.EVENT_SUBJECT_FIELD_IDX]
            code = event[cfg.EVENT_BEHAVIOR_FIELD_IDX]
            modifier = event[cfg.EVENT_MODIFIER_FIELD_IDX] if include_modifiers else ""

            # check if code is state
            if code in state_events_list:
                if (subject, code, modifier) in mem_behav and mem_behav[(subject, code, modifier)]:
                    # second occurrence of the state: close the interval
                    if (subject, code, modifier) not in intervals_behav:
                        intervals_behav[(subject, code, modifier)] = []
                    intervals_behav[(subject, code, modifier)].append((mem_behav[(subject, code, modifier)], time_))

                    mem_behav[(subject, code, modifier)] = 0
                else:
                    # first occurrence: remember the start time.
                    # NOTE(review): a state starting exactly at time 0 is falsy
                    # and would be seen as "not open" — confirm events cannot
                    # start at exactly 0
                    mem_behav[(subject, code, modifier)] = time_

            # check if code is a point event (stored as a zero-length interval)
            if code in point_events_list:
                if (subject, code, modifier) not in intervals_behav:
                    intervals_behav[(subject, code, modifier)] = []
                intervals_behav[(subject, code, modifier)].append((time_, time_))

        return intervals_behav

    except Exception:
        # any error (missing key, malformed event...) is signaled with a sentinel key
        return {"error": ""}
|
|
655
|
+
|
|
656
|
+
|
|
657
|
+
def flatten_list(nested_list) -> list:
|
|
658
|
+
"""
|
|
659
|
+
Flatten a list of lists.
|
|
660
|
+
"""
|
|
661
|
+
flattened: list = []
|
|
662
|
+
for item in nested_list:
|
|
663
|
+
if isinstance(item, list):
|
|
664
|
+
flattened.extend(flatten_list(item))
|
|
665
|
+
else:
|
|
666
|
+
flattened.append(item)
|
|
667
|
+
return flattened
|
|
668
|
+
|
|
669
|
+
|
|
670
|
+
def get_current_states_modifiers_by_subject(
|
|
671
|
+
state_behaviors_codes: list, events: list, subjects: dict, time_: dec, include_modifiers: bool = False
|
|
672
|
+
) -> dict:
|
|
673
|
+
"""
|
|
674
|
+
get current states and modifiers (if requested) for subjects at given time
|
|
675
|
+
|
|
676
|
+
Args:
|
|
677
|
+
state_behaviors_codes (list): list of behavior codes defined as STATE event
|
|
678
|
+
events (list): list of events
|
|
679
|
+
subjects (dict): dictionary of subjects
|
|
680
|
+
time (Decimal): time or image index for an observation from images
|
|
681
|
+
include_modifiers (bool): include modifier if True (default: False)
|
|
682
|
+
|
|
683
|
+
Returns:
|
|
684
|
+
dict: current states by subject. dict of list
|
|
685
|
+
"""
|
|
686
|
+
current_states: dict = {}
|
|
687
|
+
if time_.is_nan():
|
|
688
|
+
for idx in subjects:
|
|
689
|
+
current_states[idx] = []
|
|
690
|
+
return current_states
|
|
691
|
+
|
|
692
|
+
# check if time contains NA
|
|
693
|
+
if [x for x in events if x[cfg.EVENT_TIME_FIELD_IDX].is_nan()]:
|
|
694
|
+
check_index = cfg.PJ_OBS_FIELDS[cfg.IMAGES][cfg.IMAGE_INDEX]
|
|
695
|
+
else:
|
|
696
|
+
check_index = cfg.EVENT_TIME_FIELD_IDX
|
|
697
|
+
|
|
698
|
+
if include_modifiers:
|
|
699
|
+
for idx in subjects:
|
|
700
|
+
current_states[subjects[idx]["name"]] = {}
|
|
701
|
+
for x in events:
|
|
702
|
+
if x[check_index] > time_:
|
|
703
|
+
break
|
|
704
|
+
if x[cfg.EVENT_BEHAVIOR_FIELD_IDX] in state_behaviors_codes:
|
|
705
|
+
if (x[cfg.EVENT_BEHAVIOR_FIELD_IDX], x[cfg.EVENT_MODIFIER_FIELD_IDX]) not in current_states[x[cfg.EVENT_SUBJECT_FIELD_IDX]]:
|
|
706
|
+
current_states[x[cfg.EVENT_SUBJECT_FIELD_IDX]][(x[cfg.EVENT_BEHAVIOR_FIELD_IDX], x[cfg.EVENT_MODIFIER_FIELD_IDX])] = (
|
|
707
|
+
False
|
|
708
|
+
)
|
|
709
|
+
|
|
710
|
+
current_states[x[cfg.EVENT_SUBJECT_FIELD_IDX]][
|
|
711
|
+
(x[cfg.EVENT_BEHAVIOR_FIELD_IDX], x[cfg.EVENT_MODIFIER_FIELD_IDX])
|
|
712
|
+
] = not current_states[x[cfg.EVENT_SUBJECT_FIELD_IDX]][(x[cfg.EVENT_BEHAVIOR_FIELD_IDX], x[cfg.EVENT_MODIFIER_FIELD_IDX])]
|
|
713
|
+
|
|
714
|
+
r: dict = {}
|
|
715
|
+
for idx in subjects:
|
|
716
|
+
r[idx] = [f"{bm[0]} ({bm[1]})" for bm in current_states[subjects[idx]["name"]] if current_states[subjects[idx]["name"]][bm]]
|
|
717
|
+
|
|
718
|
+
else:
|
|
719
|
+
for idx in subjects:
|
|
720
|
+
current_states[subjects[idx]["name"]] = {}
|
|
721
|
+
for b in state_behaviors_codes:
|
|
722
|
+
current_states[subjects[idx]["name"]][b] = False
|
|
723
|
+
for x in events:
|
|
724
|
+
if x[check_index] > time_:
|
|
725
|
+
break
|
|
726
|
+
if x[cfg.EVENT_BEHAVIOR_FIELD_IDX] in state_behaviors_codes:
|
|
727
|
+
current_states[x[cfg.EVENT_SUBJECT_FIELD_IDX]][x[cfg.EVENT_BEHAVIOR_FIELD_IDX]] = not current_states[
|
|
728
|
+
x[cfg.EVENT_SUBJECT_FIELD_IDX]
|
|
729
|
+
][x[cfg.EVENT_BEHAVIOR_FIELD_IDX]]
|
|
730
|
+
|
|
731
|
+
r: dict = {}
|
|
732
|
+
for idx in subjects:
|
|
733
|
+
r[idx] = [b for b in state_behaviors_codes if current_states[subjects[idx]["name"]][b]]
|
|
734
|
+
|
|
735
|
+
return r
|
|
736
|
+
|
|
737
|
+
|
|
738
|
+
def get_current_states_modifiers_by_subject_2(state_behaviors_codes: list, events: list, subjects: dict, time: dec) -> dict:
    """
    Get the states (with their modifiers) that are ongoing for every subject at *time*.

    Differs from get_current_states_modifiers_by_subject in the output format:
    each entry is a (behavior, modifiers) tuple.

    Args:
        state_behaviors_codes (list): list of behavior codes defined as STATE event
        events (list): list of events
        subjects (dict): dictionary of subjects
        time (Decimal): time

    Returns:
        dict: current states by subject index. dict of list of (behavior, modifiers) tuples
    """
    current_states: dict = {idx: [] for idx in subjects}
    for idx in subjects:
        subject_name = subjects[idx][cfg.SUBJECT_NAME]
        for code in state_behaviors_codes:
            # all occurrences of this state for this subject up to *time*
            occurrences = [
                (ev[cfg.EVENT_BEHAVIOR_FIELD_IDX], ev[cfg.EVENT_MODIFIER_FIELD_IDX])
                for ev in events
                if ev[cfg.EVENT_SUBJECT_FIELD_IDX] == subject_name
                and ev[cfg.EVENT_BEHAVIOR_FIELD_IDX] == code
                and ev[cfg.EVENT_TIME_FIELD_IDX] <= time
            ]
            # an odd number of start/stop occurrences means the state is still ongoing
            if occurrences and len(occurrences) % 2 == 1:
                current_states[idx].append(occurrences[-1])

    return current_states
|
|
768
|
+
|
|
769
|
+
|
|
770
|
+
def get_current_points_by_subject(
    point_behaviors_codes: list,
    events: list,
    subjects: dict,
    time: dec,
    tolerance: dec,
    include_modifiers: bool = False,
) -> dict:
    """
    Get the POINT events occurring for each subject in the interval [time, time + tolerance).

    Modifiers are always included in the output tuples.

    Args:
        point_behaviors_codes (list): list of behavior codes defined as POINT event
        events (list): list of events
        subjects (dict): dictionary of subjects
        time (Decimal): start of the interval (s)
        tolerance (Decimal): width of the interval (s)
        include_modifiers (bool): currently unused — modifiers are always included

    Returns:
        dict: (behavior, modifiers) tuples by subject index. dict of list
    """
    current_points: dict = {idx: [] for idx in subjects}
    for idx in subjects:
        subject_name = subjects[idx]["name"]
        for code in point_behaviors_codes:
            current_points[idx].extend(
                (ev[cfg.EVENT_BEHAVIOR_FIELD_IDX], ev[cfg.EVENT_MODIFIER_FIELD_IDX])
                for ev in events
                if ev[cfg.EVENT_SUBJECT_FIELD_IDX] == subject_name
                and ev[cfg.EVENT_BEHAVIOR_FIELD_IDX] == code
                and time <= ev[cfg.EVENT_TIME_FIELD_IDX] < (time + tolerance)
            )

    return current_points
|
|
817
|
+
|
|
818
|
+
|
|
819
|
+
def check_txt_file(file_name: str) -> dict:
    """
    Extract parameters of a text file (test for TSV/CSV structure).

    Uses csv.Sniffer on the first 4 KiB to detect the delimiter and a header,
    then reads the whole file to check that every non-empty row has the same
    number of fields.

    Args:
        file_name (str): path of file to be analyzed

    Returns:
        dict: on success {"homogeneous": True,
                          "fields number": number of fields,
                          "separator": separator char,
                          "rows number": number of non-empty rows,
                          "has header": bool}
              or {"homogeneous": False} when rows have varying field counts,
              or {"error": message} on failure (empty file, sniffing error, I/O error, ...)
    """
    try:
        # snif txt file: detect dialect and header from a 4 KiB sample
        with open(file_name) as csvfile:
            buff = csvfile.read(4096)
            snif = csv.Sniffer()
            dialect = snif.sniff(buff)
            has_header = snif.has_header(buff)

        # NOTE: registers a process-wide dialect named "dialect" (overwritten on each call)
        csv.register_dialect("dialect", dialect)
        rows_len: list = []
        with open(file_name, "r") as f:
            reader = csv.reader(f, dialect="dialect")
            for row in reader:
                # skip empty rows
                if not row:
                    continue
                """
                if len(row) not in rows_len:
                    rows_len.append(len(row))
                    if len(rows_len) > 1:
                        break
                """
                rows_len.append(len(row))

        rows_number = len(rows_len)
        rows_uniq_len = set(rows_len)

        # test if file empty
        if not rows_uniq_len:
            return {"error": "The file is empty"}

        # a single distinct row length means the file is homogeneous
        if len(rows_uniq_len) == 1:
            return {
                "homogeneous": True,
                "fields number": rows_len[0],
                "separator": dialect.delimiter,
                "rows number": rows_number,
                "has header": has_header,
            }
        else:
            return {"homogeneous": False}
    except Exception:
        # any failure (I/O, sniffing) is reported as an error message
        return {"error": str(sys.exc_info()[1])}
|
|
874
|
+
|
|
875
|
+
|
|
876
|
+
def extract_wav(ffmpeg_bin: str, media_file_path: str, tmp_dir: str) -> str:
    """
    Extract WAV audio from a media file and save it in tmp_dir.

    If the media file already is a WAV file it is simply copied; otherwise a
    previously extracted copy is reused when present, and FFmpeg is run as a
    last resort (mono, no video).

    Args:
        ffmpeg_bin (str): FFmpeg executable path
        media_file_path (str): media file path
        tmp_dir (str): temporary dir where to save wav file

    Returns:
        str: wav file path or "" if error
    """

    wav_file_path = Path(tmp_dir) / Path(media_file_path + ".wav").name

    # check if media file is a wav file
    try:
        wav = wave.open(media_file_path, "r")
        wav.close()
        logger.debug(f"{media_file_path} is a WAV file. Copying in the temp directory...")
        copyfile(media_file_path, wav_file_path)
        logger.debug(f"{media_file_path} copied in {wav_file_path}")
        return str(wav_file_path)
    except Exception:
        # not a WAV file: reuse a previously extracted copy if one exists
        if wav_file_path.is_file():
            return str(wav_file_path)
        # extract wav file using FFmpeg

        p = subprocess.Popen(
            f'"{ffmpeg_bin}" -i "{media_file_path}" -y -ac 1 -vn "{wav_file_path}"',
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
        )
        out, error = p.communicate()
        out, error = out.decode("utf-8"), error.decode("utf-8")
        logger.debug(f"{out}, {error}")

        # FFmpeg reports "does not contain any stream" when there is no audio track
        if "does not contain any stream" not in error:
            if wav_file_path.is_file():
                return str(wav_file_path)
            return ""
        else:
            return ""
|
|
919
|
+
|
|
920
|
+
|
|
921
|
+
def decimal_default(obj):
    """
    JSON serialization helper: encode a Decimal as a float rounded to 3 digits.

    Raises TypeError for any other type, as required by json.dump(default=...).
    """
    if not isinstance(obj, dec):
        raise TypeError
    return float(round(obj, 3))
|
|
925
|
+
|
|
926
|
+
|
|
927
|
+
def complete(lst: list, max_: int) -> list:
    """
    Pad *lst* in place with empty strings ("") until it holds max_ items.

    Args:
        lst (list): list to complete (mutated in place)
        max_ (int): number of items to reach

    Returns:
        list: the same list object, padded to max_ items with ""
    """
    missing = max_ - len(lst)
    if missing > 0:
        lst.extend([""] * missing)
    return lst
|
|
941
|
+
|
|
942
|
+
|
|
943
|
+
def datetime_iso8601(dt) -> str:
    """
    Format a datetime in ISO 8601 with a space separator and no microseconds.

    Example: 2019-06-13 10:01:02

    Returns:
        str: date time in ISO8601 format without microseconds
    """
    return dt.isoformat(sep=" ", timespec="seconds")
|
|
952
|
+
|
|
953
|
+
|
|
954
|
+
def seconds_of_day(timestamp: dt.datetime) -> dec:
    """
    Return the number of seconds elapsed since the start of *timestamp*'s day.

    Returns:
        Decimal: seconds since midnight, quantized to milliseconds
    """
    clock = timestamp.time()
    total = clock.hour * 3600 + clock.minute * 60 + clock.second + clock.microsecond / 1000000
    return dec(total).quantize(dec("0.001"))
|
|
967
|
+
|
|
968
|
+
|
|
969
|
+
def sorted_keys(d: dict) -> list:
    """
    Return the keys of *d* sorted numerically.

    Keys must be integer-like strings; they are returned as strings.

    Args:
        d (dict): dictionary

    Returns:
        list: dictionary keys sorted numerically
    """
    return [str(k) for k in sorted(map(int, d))]
|
|
980
|
+
|
|
981
|
+
|
|
982
|
+
def intfloatstr(s: str) -> Union[int, str]:
    """
    Convert *s* to int when possible, else to a 3-decimal formatted string
    when it parses as a float, else return it unchanged.

    Args:
        s (str): value to convert

    Returns:
        int | str: int(s), f"{float(s):0.3f}", or s itself
    """
    # Fix: return annotation was `-> int` although the float branch returns a
    # formatted str and the fallback returns s unchanged.
    try:
        return int(s)
    except (ValueError, TypeError):
        try:
            return f"{float(s):0.3f}"
        except (ValueError, TypeError):
            return s
|
|
994
|
+
|
|
995
|
+
|
|
996
|
+
def distance(p1: tuple, p2: tuple) -> float:
    """
    Euclidean distance between two 2D points.
    """
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return (dx**2 + dy**2) ** 0.5
|
|
1003
|
+
|
|
1004
|
+
|
|
1005
|
+
def angle(vertex: tuple, side1: tuple, side2: tuple) -> float:
    """
    Angle (in degrees) formed at *vertex* by the points side1 and side2,
    computed with the law of cosines.

    Args:
        vertex (tuple): vertex
        side1 (tuple): side 1
        side2 (tuple): side 2

    Returns:
        float: angle between side1 - vertex - side2
    """
    a = distance(vertex, side1)
    b = distance(vertex, side2)
    c = distance(side1, side2)
    # law of cosines: cos(angle) = (a^2 + b^2 - c^2) / (2ab); result converted to degrees
    return math.acos((a**2 + b**2 - c**2) / (2 * a * b)) / math.pi * 180
|
|
1026
|
+
|
|
1027
|
+
|
|
1028
|
+
def oriented_angle(P1: tuple, P2: tuple, P3: tuple) -> float:
    """
    Signed angle (degrees) between the segments P1->P2 and P1->P3.

    Both vectors emanate from the vertex P1 (the original code aliased the
    second vector's origin to P1 via x3, y3 = P1).

    Args:
        P1 (tuple): Coordinates of the vertex
        P2 (tuple): Coordinates of the first point
        P3 (tuple): Coordinates of the second point

    Returns:
        float: The oriented angle between the two segments in degrees.
    """
    heading_ab = math.atan2(P2[1] - P1[1], P2[0] - P1[0])
    heading_cd = math.atan2(P3[1] - P1[1], P3[0] - P1[0])
    return math.degrees(heading_ab - heading_cd)
|
|
1052
|
+
|
|
1053
|
+
|
|
1054
|
+
def oriented_angle_trigo(B: Tuple[float, float], A: Tuple[float, float], C: Tuple[float, float]) -> float:
    """
    Oriented angle between vectors BA and BC, in degrees.

    Positive in the counter-clockwise (trigonometric) direction.

    Parameters:
        B: The pivot point (the origin of the vectors BA and BC).
        A, C: Points that define the vectors.

    Returns:
        Angle in degrees, between 0 and 360.
    """
    # vectors from the pivot
    ba = (A[0] - B[0], A[1] - B[1])
    bc = (C[0] - B[0], C[1] - B[1])

    # atan2(cross, dot) gives the signed angle between the two vectors
    dot_product = ba[0] * bc[0] + ba[1] * bc[1]
    cross_product = ba[0] * bc[1] - ba[1] * bc[0]
    degrees_signed = math.degrees(math.atan2(cross_product, dot_product))

    # normalize to [0, 360)
    return degrees_signed + 360 if degrees_signed < 0 else degrees_signed
|
|
1082
|
+
|
|
1083
|
+
|
|
1084
|
+
def mem_info():
    """
    get info about total mem, used mem and available mem using:
        "free -m" command on Linux
        "top -l 1 -s 0" command in MacOS
        "wmic" commands on Windows

    Returns:
        bool: True if error
        dict: values in Mb ("total_memory", "used_memory", "free_memory");
              NOTE: on Windows "used_memory" is not provided — callers should use .get()
    """

    if sys.platform.startswith("linux"):
        try:
            process = subprocess.run(["free", "-m"], stdout=subprocess.PIPE)
            # out, err = process.communicate()
            out = process.stdout
            # second output line of `free -m`: label, total, used, free, shared, buff/cache, available
            _, tot_mem, used_mem, _, _, _, available_mem = [x.decode("utf-8") for x in out.split(b"\n")[1].split(b" ") if x != b""]
            return False, {
                "total_memory": int(tot_mem),
                "used_memory": int(used_mem),
                "free_memory": int(available_mem),
            }
        except Exception:
            return True, {"msg": error_info(sys.exc_info())[0]}

    if sys.platform.startswith("darwin"):
        try:
            output = subprocess.check_output(("top", "-l", "1", "-s", "0"))
            # parse the "PhysMem: ... used ... unused" line; fields 1 and 5 carry the Mb values
            r = [x.decode("utf-8") for x in output.split(b"\n") if b"PhysMem" in x][0].split(" ")
            used_mem, free_mem = int(r[1].replace("M", "")), int(r[5].replace("M", ""))
            return False, {"total_memory": used_mem + free_mem, "used_memory": used_mem, "free_memory": free_mem}
        except Exception:
            return True, {"msg": error_info(sys.exc_info())[0]}

    if sys.platform.startswith("win"):
        try:
            # divided twice by 1024 (assumes wmic reports TotalPhysicalMemory in bytes — converted to Mb)
            output = subprocess.run(["wmic", "computersystem", "get", "TotalPhysicalMemory", "/", "Value"], stdout=subprocess.PIPE)
            tot_mem = int(output.stdout.strip().split(b"=")[-1].decode("utf-8")) / 1024 / 1024

            # divided once by 1024 (assumes wmic reports FreePhysicalMemory in Kb — converted to Mb)
            output = subprocess.run(["wmic", "OS", "get", "FreePhysicalMemory", "/", "Value"], stdout=subprocess.PIPE)
            free_mem = int(output.stdout.strip().split(b"=")[-1].decode("utf-8")) / 1024
            return False, {"total_memory": tot_mem, "free_memory": free_mem}

        except Exception:
            return True, {"msg": error_info(sys.exc_info())[0]}

    return True, {"msg": "Unknown operating system"}
|
|
1132
|
+
|
|
1133
|
+
|
|
1134
|
+
def polygon_area(poly: list) -> float:
    """
    Area of a polygon given as a list of (x, y) vertices.

    Uses the shoelace formula (http://www.mathopenref.com/coordpolygonarea.html).
    """
    vertex_count = len(poly)
    acc = 0
    for i, (x1, y1) in enumerate(poly):
        # wrap around to the first vertex after the last one
        x2, y2 = poly[(i + 1) % vertex_count]
        acc += x1 * y2 - x2 * y1

    return abs(acc / 2)
|
|
1147
|
+
|
|
1148
|
+
|
|
1149
|
+
def polyline_length(poly: list) -> float:
    """
    Total length of a polyline: sum of the Euclidean distances
    between consecutive (x, y) points.
    """
    total = 0
    for (x1, y1), (x2, y2) in zip(poly, poly[1:]):
        total += ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5

    return total
|
|
1160
|
+
|
|
1161
|
+
|
|
1162
|
+
def url2path(url: str) -> str:
    """
    Convert a URL into a local path name.

    On Windows, the spurious leading slash produced by file:///C:/... URLs
    is removed.
    """
    parsed = urllib.parse.urlparse(url)
    local_path = urllib.parse.unquote(parsed.path)
    # check / for windows
    if sys.platform.startswith("win") and local_path.startswith("/"):
        local_path = local_path[1:]
    return local_path
|
|
1173
|
+
|
|
1174
|
+
|
|
1175
|
+
def float2decimal(f):
    """
    Convert *f* to a Decimal via its string representation
    (avoids binary floating-point artifacts like Decimal(0.1...)).
    """
    return dec(str(f))
|
|
1180
|
+
|
|
1181
|
+
|
|
1182
|
+
def time2seconds(time_: str) -> dec:
    """
    Convert a time string to a number of seconds (Decimal).

    Two accepted formats:
      * "YYYY-MM-DD hh:mm:ss.ffffff" (detected by an embedded space) — converted
        to a UNIX epoch timestamp;
      * "[-]hh:mm:ss[.zzz]" — converted to a signed number of seconds.

    Returns Decimal("0.000") when parsing fails.
    """

    if " " in time_:
        # full date-time: convert to an epoch timestamp
        try:
            parsed = dt.datetime.strptime(time_, "%Y-%m-%d %H:%M:%S.%f")
            return dec(str(parsed.timestamp()))
        except Exception:
            return dec("0.000")

    try:
        negative = "-" in time_
        # fields beyond the first three are ignored (index-based access)
        parts = time_.replace("-", "").split(":")
        seconds = int(parts[0]) * 3600 + int(parts[1]) * 60 + dec(parts[2])
        return -dec(seconds) if negative else dec(seconds)
    except Exception:
        return dec("0.000")
|
|
1207
|
+
|
|
1208
|
+
|
|
1209
|
+
def seconds2time(sec: dec | None) -> str:
    """
    Format a number of seconds as hh:mm:ss.sss.

    Returns cfg.NA for None or NaN. Values greater than cfg.DATE_CUTOFF are
    treated as epoch timestamps and rendered as a full date-time.

    Args:
        sec (Decimal): time in seconds
    Returns:
        str: time in format hh:mm:ss.sss (or date-time / NA, see above)
    """
    if sec is None or math.isnan(sec):
        return cfg.NA

    # values beyond one day are epoch timestamps, not plain durations
    if sec > cfg.DATE_CUTOFF:
        stamp = dt.datetime.fromtimestamp(float(sec))
        return f"{stamp:%Y-%m-%d %H:%M:%S}.{stamp.microsecond / 1000:03.0f}"

    sign = "-" if sec < 0 else ""
    magnitude = abs(sec)

    minutes = int(magnitude / 60)
    hours = 0
    if minutes >= 60:
        hours, minutes = divmod(minutes, 60)

    seconds_field = f"{magnitude - hours * 3600 - minutes * 60:06.3f}"

    return f"{sign}{hours:02}:{minutes:02}:{seconds_field}"
|
|
1242
|
+
|
|
1243
|
+
|
|
1244
|
+
def safeFileName(s: str) -> str:
    """
    Return *s* with characters that are not allowed in file names replaced by "_".
    """
    forbidden = '/\\:*?"<>|\n\r'
    return s.translate(str.maketrans(forbidden, "_" * len(forbidden)))
|
|
1254
|
+
|
|
1255
|
+
|
|
1256
|
+
def safe_xl_worksheet_title(title: str, output_format: str):
    """
    Sanitize a worksheet title for XLS/XLSX output.

    Excel limits worksheet names to 31 characters (for both .xls and .xlsx)
    and forbids a set of characters, which are replaced here by spaces.

    Args:
        title (str): title for worksheet
        output_format (str): xls or xlsx (other formats leave the title unchanged)

    Returns:
        str: sanitized title
    """
    if output_format in ("xls", "xlsx"):
        # Fix: the original test was `output_format in ("xls")` — a string
        # membership test, not a tuple — and only truncated .xls titles,
        # although the 31-character sheet-name limit applies to .xlsx too.
        title = title[:31]
        for forbidden_char in cfg.EXCEL_FORBIDDEN_CHARACTERS:
            title = title.replace(forbidden_char, " ")
    return title
|
|
1270
|
+
|
|
1271
|
+
|
|
1272
|
+
def eol2space(s: str) -> str:
    """
    Replace every line ending with a space, for all platforms.

    Args:
        s (str): string to be converted

    Returns:
        str: string where \\r\\n, \\n and \\r are each replaced by a space
    """
    # \r\n must be handled before the single-character endings
    for eol in ("\r\n", "\n", "\r"):
        s = s.replace(eol, " ")
    return s
|
|
1283
|
+
|
|
1284
|
+
|
|
1285
|
+
def test_ffmpeg_path(FFmpegPath: str) -> Tuple[bool, str]:
    """
    Test if FFmpegPath points to a working FFmpeg executable.

    Runs `ffmpeg -version` and inspects stdout/stderr: rejects the Libav fork
    (avconv) and any binary that does not report "ffmpeg version".

    Args:
        FFmpegPath (str): ffmpeg path to test

    Returns:
        bool: True: path found
        str: message (empty on success)
    """

    out, error = subprocess.Popen(f'"{FFmpegPath}" -version', stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate()
    logger.debug(f"test ffmpeg path output: {out}")
    logger.debug(f"test ffmpeg path error: {error}")

    # reject the Libav fork: its binary identifies itself as avconv
    if (b"avconv" in out) or (b"the Libav developers" in error):
        return False, "Please use FFmpeg from https://www.ffmpeg.org in place of FFmpeg from Libav project."

    # some builds print the banner on stderr, hence the check on both streams
    if (b"ffmpeg version" not in out) and (b"ffmpeg version" not in error):
        return False, "FFmpeg is required but it was not found.<br>See https://www.ffmpeg.org"

    return True, ""
|
|
1308
|
+
|
|
1309
|
+
|
|
1310
|
+
def check_ffmpeg_path() -> Tuple[bool, str]:
    """
    Check for the FFmpeg executable path.

    Firstly search for the embedded version (in boris/misc/); if not found,
    fall back to a system-wide version (which must be on the PATH).

    Returns:
        bool: True if ffmpeg path found else False
        str: if bool is True returns ffmpeg path else returns error message
    """

    # platform-specific executable name.
    # Fix: the original only assigned the name for linux/darwin/win, leaving
    # `ffmpeg_executable` unbound (NameError) on any other platform.
    if sys.platform.startswith("win"):
        ffmpeg_executable = Path("ffmpeg.exe")
    else:
        ffmpeg_executable = Path("ffmpeg")

    # search embedded ffmpeg
    ffmpeg_path = Path(__file__).parent / "misc" / ffmpeg_executable

    if not ffmpeg_path.is_file():
        # search global ffmpeg (relies on the PATH)
        ffmpeg_path = ffmpeg_executable

    # test ffmpeg
    r, msg = test_ffmpeg_path(str(ffmpeg_path))
    if r:
        return True, str(ffmpeg_path)
    else:
        return False, "FFmpeg is not available"
|
|
1339
|
+
|
|
1340
|
+
|
|
1341
|
+
def smart_size_format(n: Union[float, int, str, None]) -> str:
    """
    Format a byte count with a b / Kb / Mb / Gb unit chosen from its magnitude.

    Args:
        n: byte count (number or numeric string); None, "NA", NaN or
           non-numeric input yields cfg.NA

    Returns:
        str: formatted size, e.g. "1.5 Kb"
    """
    if n is None:
        return cfg.NA
    if str(n) == "NA":
        return cfg.NA
    # Fix: the signature accepts str but math.isnan(n) raised TypeError for any
    # numeric string; convert first and reject non-numeric values gracefully.
    try:
        value = float(n)
    except (TypeError, ValueError):
        return cfg.NA
    if math.isnan(value):
        return cfg.NA
    if value < 1_000:
        return f"{value:,.1f} b"
    if value < 1_000_000:
        return f"{value / 1_000:,.1f} Kb"
    if value < 1_000_000_000:
        return f"{value / 1_000_000:,.1f} Mb"
    return f"{value / 1_000_000_000:,.1f} Gb"
|
|
1358
|
+
|
|
1359
|
+
|
|
1360
|
+
def get_systeminfo() -> str:
    """
    Return a multi-line description of the running environment:
    BORIS, OS, CPU, Python, Qt/PySide and MPV versions, plus RAM figures
    when mem_info() succeeds.
    """

    mpv_lib_version_, mpv_lib_file_path, mpv_api_version = mpv_lib_version()

    system_info = (
        f"BORIS version: {version.__version__}\n"
        f"OS: {platform.uname().system} {platform.uname().release} {platform.uname().version}\n"
        f"CPU: {platform.uname().machine} {platform.uname().processor}\n"
        f"Python {platform.python_version()} ({'64-bit' if sys.maxsize > 2**32 else '32-bit'})\n"
        f"Qt {qVersion()} - PySide {pyside6_version}\n"
        f"MPV library version: {mpv_lib_version_}\n"
        f"MPV API version: {mpv_api_version}\n"
        f"MPV library file path: {mpv_lib_file_path}\n\n"
    )

    # mem_info returns (error_flag, dict); append RAM figures only on success
    r, memory = mem_info()
    if not r:
        system_info += (
            f"Memory (RAM) Total: {memory.get('total_memory', 'Not available'):.2f} Mb "
            f"Free: {memory.get('free_memory', 'Not available'):.2f} Mb\n\n"
        )

    return system_info
|
|
1386
|
+
|
|
1387
|
+
"""
|
|
1388
|
+
# system info
|
|
1389
|
+
systeminfo = ""
|
|
1390
|
+
if sys.platform.startswith("win"):
|
|
1391
|
+
# systeminfo = subprocess.getoutput("systeminfo")
|
|
1392
|
+
systeminfo = subprocess.run("systeminfo /FO csv /NH", capture_output=True, text=True, encoding="mbcs", shell=True).stdout
|
|
1393
|
+
|
|
1394
|
+
import csv
|
|
1395
|
+
from io import StringIO
|
|
1396
|
+
|
|
1397
|
+
# Parse it as CSV
|
|
1398
|
+
f = StringIO(systeminfo)
|
|
1399
|
+
reader = csv.reader(f)
|
|
1400
|
+
parsed_data = list(reader)[0]
|
|
1401
|
+
# Print specific fields by index
|
|
1402
|
+
info_to_show = ""
|
|
1403
|
+
info_to_show += f"Computer Name: {parsed_data[0]}\n"
|
|
1404
|
+
info_to_show += f"OS Name: {parsed_data[1]}\n"
|
|
1405
|
+
info_to_show += f"OS Version: {parsed_data[2]}\n"
|
|
1406
|
+
info_to_show += f"System Manufacturer: {parsed_data[11]}\n"
|
|
1407
|
+
info_to_show += f"System Model: {parsed_data[12]}\n"
|
|
1408
|
+
info_to_show += f"Processor: {parsed_data[14]}\n"
|
|
1409
|
+
info_to_show += f"Locale: {parsed_data[19]}\n"
|
|
1410
|
+
info_to_show += f"Installed Memory: {parsed_data[22]}\n"
|
|
1411
|
+
|
|
1412
|
+
# info about graphic card
|
|
1413
|
+
graphic_info = subprocess.run(
|
|
1414
|
+
"wmic path win32_videocontroller get name", capture_output=True, text=True, encoding="mbcs", shell=True
|
|
1415
|
+
).stdout
|
|
1416
|
+
info_to_show += graphic_info.replace("\n", "").replace("Name", "Graphic card model")
|
|
1417
|
+
|
|
1418
|
+
systeminfo = info_to_show
|
|
1419
|
+
|
|
1420
|
+
if sys.platform.startswith("linux"):
|
|
1421
|
+
systeminfo = subprocess.getoutput("cat /etc/*rel*; uname -a")
|
|
1422
|
+
|
|
1423
|
+
return systeminfo
|
|
1424
|
+
"""
|
|
1425
|
+
|
|
1426
|
+
|
|
1427
|
+
def ffprobe_media_analysis(ffmpeg_bin: str, file_name: str) -> dict:
    """
    Analyse media parameters with ffprobe (if available).

    Runs `ffprobe -print_format json -show_format -show_streams` and extracts
    duration, frame rate, resolution, bitrates, codecs and creation time.

    Args:
        ffmpeg_bin (str): ffmpeg path (ffprobe is looked for next to it)
        file_name (str): path of media file

    Returns:
        dict: media parameters, or {"error": message} on failure
    """

    # check ffprobe executable in same place than ffmpeg
    ffprobe_bin = ffmpeg_bin.replace("ffmpeg", "ffprobe")
    if not Path(ffprobe_bin).is_file():
        # fall back to searching the PATH
        if which(ffprobe_bin) is None:
            return {"error": "ffprobe not found"}

    command = f'"{ffprobe_bin}" -hide_banner -v error -print_format json -show_format -show_streams "{file_name}"'

    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, error = p.communicate()
    if error:
        if b"invalid data" in error:
            return {"error": f"{error}"}

    try:
        hasVideo = False
        hasAudio = False
        """bitrate = None"""
        video_bitrate = None
        audio_bitrate = []
        resolution = None
        fps: float = 0
        sample_rate = None
        duration = None
        audio_duration = cfg.NA
        frames_number = None
        size = None
        audio_codec = None
        video_codec = None

        video_param = json.loads(out.decode("utf-8"))
        if "size" in video_param["format"]:
            size = int(video_param["format"]["size"])

        for stream in video_param["streams"]:
            if stream["codec_type"] == "video":
                hasVideo = True
                video_bitrate = int(stream["bit_rate"]) if "bit_rate" in stream else None
                resolution = f"{stream['width']}x{stream['height']}"

                """
                if "avg_frame_rate" in stream:
                    if stream["avg_frame_rate"] == "0/0":
                        fps = 0
                    else:
                        try:
                            fps = eval(stream["avg_frame_rate"])
                        except Exception:
                            fps = 0
                """
                if "r_frame_rate" in stream:
                    if stream["r_frame_rate"] == "0/0":
                        fps = 0
                    else:
                        try:
                            # HACK: eval evaluates the "num/den" fraction reported by
                            # ffprobe; this executes external tool output as Python code
                            fps = eval(stream["r_frame_rate"])
                        except Exception:
                            fps = 0
                    if fps >= 1000 and "avg_frame_rate" in stream:  # case for some h265 video ("r_frame_rate": "1200000/1")
                        try:
                            fps = eval(stream["avg_frame_rate"])
                        except Exception:
                            pass

                fps = round(fps, 3)

                if "duration" in stream:
                    duration = float(stream["duration"])
                if "duration_ts" in stream:
                    frames_number = int(stream["duration_ts"])
                elif "nb_frames" in stream:
                    frames_number = int(stream["nb_frames"])
                else:
                    frames_number = None

                video_codec = stream["codec_long_name"] if "codec_long_name" in stream else None

            if stream["codec_type"] == "audio":
                hasAudio = True
                sample_rate = float(stream["sample_rate"]) if "sample_rate" in stream else cfg.NA
                # TODO manage audio_duration parameter
                audio_duration = float(stream["duration"]) if "duration" in stream else cfg.NA
                audio_codec = stream["codec_long_name"]
                audio_bitrate.append(int(stream.get("bit_rate", 0)))

        # check duration: fall back to the container-level value
        if duration is None:
            if "duration" in video_param["format"]:
                duration = float(video_param["format"]["duration"])
            else:
                duration = 0

        # check bit rate (container level)
        if "bit_rate" in video_param["format"]:
            all_bitrate = int(video_param["format"]["bit_rate"])
        else:
            all_bitrate = None

        # estimate video bitrate as total minus the audio streams' bitrates
        if video_bitrate is None and all_bitrate is not None:
            video_bitrate = all_bitrate - sum(audio_bitrate)

        # extract format long name
        format_long_name = video_param["format"]["format_long_name"] if "format_long_name" in video_param["format"] else cfg.NA

        # extract creation time ("creation_time": "2023-03-22T16:50:32.000000Z")
        creation_time = cfg.NA
        if "tags" in video_param["format"] and "creation_time" in video_param["format"]["tags"]:
            creation_time = video_param["format"]["tags"]["creation_time"].replace("T", " ")
            if "." in creation_time:
                # drop fractional seconds (and the trailing Z with them)
                creation_time = creation_time.split(".")[0]

        return {
            "analysis_program": "ffprobe",
            "frames_number": frames_number,
            "duration_ms": duration * 1000,
            "duration": duration,
            "audio_duration": audio_duration,
            "fps": fps,
            "has_video": hasVideo,
            "has_audio": hasAudio,
            "bitrate": video_bitrate,
            "resolution": resolution,
            "sample_rate": sample_rate,
            "file size": size,
            "audio_codec": audio_codec,
            "video_codec": video_codec,
            "creation_time": creation_time,
            "format_long_name": format_long_name,
        }

    except Exception as e:
        return {"error": str(e)}
|
|
1571
|
+
|
|
1572
|
+
|
|
1573
|
+
def accurate_media_analysis(ffmpeg_bin: str, file_name: str) -> dict:
    """
    analyse frame rate and video duration with ffprobe or ffmpeg if ffprobe not available
    Returns parameters: duration, duration_ms, bitrate, frames_number, fps, has_video (True/False), has_audio (True/False)

    Args:
        ffmpeg_bin (str): ffmpeg path
        file_name (str): path of media file

    Returns:
        dict containing keys: duration, duration_ms, frames_number, bitrate, fps, has_video, has_audio
        (or a dict with a single "error" key on failure)
    """

    # ffprobe first: its JSON output is far more reliable than scraping ffmpeg stderr
    ffprobe_results = ffprobe_media_analysis(ffmpeg_bin, file_name)

    logger.debug(f"file_name: {file_name}")
    logger.debug(f"ffprobe_results: {ffprobe_results}")

    if ("error" not in ffprobe_results) and (ffprobe_results["bitrate"] is not None):
        return ffprobe_results
    else:
        # use ffmpeg
        # NOTE(review): shell=True with an interpolated path is fragile if the file
        # name contains a double quote — consider a list-based command (shell=False)
        command = f'"{ffmpeg_bin}" -hide_banner -i "{file_name}" > {"NUL" if sys.platform.startswith("win") else "/dev/null"}'

        p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

        duration, fps, hasVideo, hasAudio, bitrate = 0, 0, False, False, None
        try:
            _, error = p.communicate()
        except Exception as e:
            return {"error": str(e)}

        # ffmpeg writes the media description on stderr (as bytes)
        rows = error.split(b"\n")

        # check if file found and if invalid data found
        for row in rows:
            if b"No such file or directory" in row:
                return {"error": "No such file or directory"}
            if b"Invalid data found when processing input" in row:
                return {"error": "This file does not seem to be a media file"}

        # video duration
        try:
            for row in rows:
                if b"Duration" in row:
                    duration = time2seconds(row.split(b"Duration: ")[1].split(b",")[0].strip().decode("utf-8"))
                    break
        except Exception:
            # malformed duration line: fall back to 0
            duration = 0

        # bitrate
        try:
            for row in rows:
                if b"bitrate:" in row:
                    re_results = re.search(b"bitrate: (.{1,10}) kb", row, re.IGNORECASE)
                    if re_results:
                        bitrate = int(re_results.group(1).strip()) * 1000
                        break
        except Exception:
            bitrate = None

        # fps
        fps = 0
        try:
            for row in rows:
                if b" fps," in row:
                    re_results = re.search(b", (.{1,10}) fps,", row, re.IGNORECASE)
                    if re_results:
                        fps = dec(re_results.group(1).strip().decode("utf-8"))
                        break
        except Exception:
            fps = 0

        # check for video stream
        hasVideo, resolution = False, None
        try:
            for row in rows:
                if b"Stream #" in row and b"Video:" in row:
                    hasVideo = True
                    # get resolution \d{3,5}x\d{3,5}
                    # BUG FIX: row is bytes, so the pattern must be bytes too.
                    # The previous str pattern raised TypeError, which the
                    # except below swallowed, silently resetting hasVideo.
                    re_results = re.search(rb"\d{3,5}x\d{3,5}", row, re.IGNORECASE)
                    if re_results:
                        resolution = re_results.group(0).decode("utf-8")
                    break
        except Exception:
            hasVideo, resolution = False, None

        # check for audio stream
        hasAudio = False
        try:
            for row in rows:
                if b"Stream #" in row and b"Audio:" in row:
                    hasAudio = True
                    break
        except Exception:
            hasAudio = False

        if not hasVideo and not hasAudio:
            return {"error": "This file does not seem to be a media file"}

        return {
            "analysis_program": "ffmpeg",
            "frames_number": int(fps * duration),
            "duration_ms": duration * 1000,
            "duration": duration,
            "audio_duration": cfg.NA,
            "fps": fps,
            "has_video": hasVideo,
            "has_audio": hasAudio,
            "bitrate": bitrate,
            "resolution": resolution,
            "format_long_name": "",
        }
|
|
1687
|
+
|
|
1688
|
+
|
|
1689
|
+
def behavior_color(colors_list: list, idx: int, default_color: str = "darkgray"):
    """
    Return the color whose index corresponds to the behavior index.

    The color list wraps around (modulo its length) and any "tab:" prefix
    (matplotlib color naming) is stripped. See BEHAVIORS_PLOT_COLORS in config.py.

    Args:
        colors_list (list): list of colors
        idx (int): index of behavior in all behaviors list (sorted)
        default_color (str): color returned when the lookup fails (e.g. empty list)

    Returns:
        str: color corresponding to behavior index
    """

    try:
        selected = colors_list[idx % len(colors_list)]
        return selected.replace("tab:", "")
    except Exception:
        # empty list (ZeroDivisionError) or non-string entry: fall back
        return default_color
|
|
1709
|
+
|
|
1710
|
+
|
|
1711
|
+
def all_behaviors(ethogram: dict) -> list:
    """
    Extract all behavior codes from the submitted ethogram.

    The order follows sorted_keys(ethogram), i.e. behaviors are sorted
    by their ethogram key.

    Args:
        ethogram (dict): ethogram

    Returns:
        list: behaviors code (sorted)
    """

    return [params[cfg.BEHAVIOR_CODE] for params in map(ethogram.get, sorted_keys(ethogram))]
|
|
1724
|
+
|
|
1725
|
+
|
|
1726
|
+
def all_subjects(subjects: dict) -> list:
    """
    Extract all subject names from the subject configuration dictionary.

    The order follows sorted_keys(subjects), i.e. subjects are sorted
    by their configuration key.

    Args:
        subjects (dict): subject configuration

    Returns:
        list: subjects name
    """

    return [params[cfg.SUBJECT_NAME] for params in map(subjects.get, sorted_keys(subjects))]
|
|
1738
|
+
|
|
1739
|
+
|
|
1740
|
+
def has_coding_map(ethogram: dict, behavior_idx: str) -> bool:
    """
    Check if the behavior at behavior_idx defines a (truthy) "coding map" entry.

    Args:
        ethogram (dict): ethogram
        behavior_idx (str): key of the behavior in the ethogram

    Returns:
        bool: True when the behavior exists and its "coding map" value is truthy
    """
    behavior = ethogram.get(behavior_idx, False)
    if not behavior:
        return False
    return bool(behavior.get("coding map", False))
|
|
1749
|
+
|
|
1750
|
+
|
|
1751
|
+
def dir_images_number(dir_path_str: str) -> dict:
    """
    Return the number of images contained in dir_path (see cfg.IMAGE_EXTENSIONS).

    Args:
        dir_path_str (str): path of the directory to scan

    Returns:
        dict: {"number of images": count} or {"error": message} when the
            directory does not exist
    """

    dir_path = Path(dir_path_str)
    if not dir_path.is_dir():
        # fixed grammar of the error message ("does not exists" -> "does not exist")
        return {"error": f"The directory {dir_path_str} does not exist"}
    img_count = 0
    for pattern in cfg.IMAGE_EXTENSIONS:
        # NOTE(review): on case-insensitive filesystems (Windows/macOS) the
        # lower- and upper-case globs may match the same files twice — confirm
        # whether double counting is acceptable here
        img_count += len(list(dir_path.glob(pattern)))
        img_count += len(list(dir_path.glob(pattern.upper())))
    return {"number of images": img_count}
|
|
1764
|
+
|
|
1765
|
+
|
|
1766
|
+
def intersection(A, B, C, D):
    """
    Line segments intersection test with decimal precision.

    Tests whether segment AB intersects segment CD by computing the
    intersection point of the two supporting lines and checking that it lies
    within the bounding boxes of both segments.

    Args:
        A, B: end points of the first segment, indexable as (x, y);
            coordinates are converted to Decimal via their str representation
        C, D: end points of the second segment (same format)

    Returns:
        bool: True when the segments intersect, else False.
            NOTE(review): also returns True when the computation raises
            (e.g. both segments vertical / degenerate points causing a
            division by zero) — confirm this fallback is intended.
    """
    getcontext().prec = 28

    # convert coordinates to Decimal through str() to avoid float artifacts
    xa, ya = dec(str(A[0])), dec(str(A[1]))
    xb, yb = dec(str(B[0])), dec(str(B[1]))
    xc, yc = dec(str(C[0])), dec(str(C[1]))
    xd, yd = dec(str(D[0])), dec(str(D[1]))

    # check if first segment is vertical
    try:
        if xa == xb:
            # intersect the line through CD with the vertical line x = xa
            slope = (yc - yd) / (xc - xd)
            intersept = yc - slope * xc
            xm = xa
            ym = slope * xm + intersept

        # check if second segment is vertical
        elif xc == xd:
            # intersect the line through AB with the vertical line x = xc
            slope = (ya - yb) / (xa - xb)
            intersept = ya - slope * xa
            xm = xc
            ym = slope * xm + intersept
        else:
            # general case: closed-form intersection point of the two lines,
            # truncated (ROUND_DOWN) to 3 decimal places
            xm = (
                (xd * xa * yc - xd * xb * yc - xd * xa * yb - xc * xa * yd + xc * xa * yb + xd * ya * xb + xc * xb * yd - xc * ya * xb)
                / (-yb * xd + yb * xc + ya * xd - ya * xc + xb * yd - xb * yc - xa * yd + xa * yc)
            ).quantize(dec(".001"), rounding=ROUND_DOWN)
            ym = (
                (yb * xc * yd - yb * yc * xd - ya * xc * yd + ya * yc * xd - xa * yb * yd + xa * yb * yc + ya * xb * yd - ya * xb * yc)
                / (-yb * xd + yb * xc + ya * xd - ya * xc + xb * yd - xb * yc - xa * yd + xa * yc)
            ).quantize(dec(".001"), rounding=ROUND_DOWN)

        # bounding boxes of both segments
        xmin1, xmax1 = min(xa, xb), max(xa, xb)
        xmin2, xmax2 = min(xc, xd), max(xc, xd)
        ymin1, ymax1 = min(ya, yb), max(ya, yb)
        ymin2, ymax2 = min(yc, yd), max(yc, yd)

        # intersection exists when the line intersection point (xm, ym)
        # lies inside both segments' bounding boxes
        return xm >= xmin1 and xm <= xmax1 and xm >= xmin2 and xm <= xmax2 and ym >= ymin1 and ym <= ymax1 and ym >= ymin2 and ym <= ymax2

    except Exception:  # for cases xa=xb=xc=xd (division by zero in the slope/denominator)
        return True
|