psychopy 2024.2.1__py3-none-any.whl → 2024.2.4__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Potentially problematic release.
This version of psychopy might be problematic: the wheel includes editor/merge backup files (*.py.orig, some still containing unresolved merge-conflict markers such as <<<<<<< HEAD), macOS .DS_Store files, stray log files, and generated demo/test data.
- psychopy/.DS_Store +0 -0
- psychopy/GIT_SHA +1 -1
- psychopy/VERSION +1 -1
- psychopy/__init__.py +10 -1
- psychopy/__init__.py.orig +65 -0
- psychopy/app/{locale/ar_001/.DS_Store → .DS_Store} +0 -0
- psychopy/app/Resources/.DS_Store +0 -0
- psychopy/app/_psychopyApp.py +11 -3
- psychopy/app/appData.spec +1 -1
- psychopy/app/builder/builder.py +1 -1
- psychopy/app/builder/builder.py.orig +3932 -0
- psychopy/app/builder/dialogs/__init__.py.orig +1679 -0
- psychopy/app/builder/dialogs/paramCtrls.py +1 -1
- psychopy/app/builder/dialogs/paramCtrls.py.orig +713 -0
- psychopy/app/colorpicker/__init__.py.orig +411 -0
- psychopy/app/cortex.log +0 -0
- psychopy/app/jobs.py +8 -1
- psychopy/app/locale/ar_001/LC_MESSAGE/messages.po +2452 -1731
- psychopy/app/locale/zh_CN/LC_MESSAGE/zh_CN.mo +0 -0
- psychopy/app/locale/zh_CN/LC_MESSAGE/zh_CN.po +6127 -0
- psychopy/app/locale/zh_CN/LC_MESSAGE/zh_CN_allFlagged.mo +0 -0
- psychopy/app/locale/zh_CN/LC_MESSAGE/zh_CN_allFlagged.po +7366 -0
- psychopy/app/plugin_manager/dialog.py +9 -7
- psychopy/app/ribbon.py +2 -1
- psychopy/app/runner/runner.py +7 -5
- psychopy/clock.py +8 -4
- psychopy/core.py.orig +169 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/index.html +23 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/randomisedBlocks-legacy-browsers.js +423 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/randomisedBlocks.js +427 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/chooseBlock.xlsx +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/facesBlock.xlsx +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/housesBlock.xlsx +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/stims/face01.jpg +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/stims/face02.jpg +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/stims/face03.jpg +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/stims/house01.jpg +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/stims/house02.jpg +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/html/resources/stims/house03.jpg +0 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/randomisedBlocks.py +330 -0
- psychopy/demos/builder/Design Templates/randomisedBlocks/randomisedBlocks_lastrun.py +330 -0
- psychopy/demos/builder/Feature Demos/eyetracking/eyetracking.xml +298 -0
- psychopy/demos/builder/Feature Demos/eyetracking/eyetracking.xsd +120 -0
- psychopy/demos/builder/Tools/.DS_Store +0 -0
- psychopy/demos/builder/Tools/gammaCalibration/.DS_Store +0 -0
- psychopy/demos/builder/Tools/gammaCalibration/data/_gamma_correction_visual_2022-05-18_14h18.29.439.csv +38 -0
- psychopy/demos/builder/Tools/gammaCalibration/data/_gamma_correction_visual_2022-05-18_14h18.29.439.log +3418 -0
- psychopy/demos/builder/Tools/gammaCalibration/data/_gamma_correction_visual_2022-05-18_14h18.29.439.psydat +0 -0
- psychopy/demos/builder/Tools/gammaCalibration/data/x1_gamma_correction_visual_2022-05-17_13h59.42.928.csv +2 -0
- psychopy/demos/builder/Tools/gammaCalibration/data/x1_gamma_correction_visual_2022-05-17_13h59.42.928.log +15 -0
- psychopy/demos/builder/Tools/gammaCalibration/data/x1_gamma_correction_visual_2022-05-17_13h59.42.928.psydat +0 -0
- psychopy/demos/builder/Tools/gammaCalibration/gamma_correction_visual.psyexp +323 -0
- psychopy/demos/builder/Tools/gammaCalibration/gamma_correction_visual.py +562 -0
- psychopy/demos/builder/Tools/gammaCalibration/gamma_correction_visual_lastrun.py +562 -0
- psychopy/demos/builder/Tools/gammaCalibration/questStairs.xlsx +0 -0
- psychopy/demos/builder/Tools/gammaCalibration/readme.md +0 -0
- psychopy/demos/builder/Tools/gammaCalibration/resources/low_contrast.png +0 -0
- psychopy/demos/builder/Tools/gammaCalibration/resources/make_2nd_order_tex.py +59 -0
- psychopy/demos/builder/Tools/gammaCalibration/resources/second_order_tex.png +0 -0
- psychopy/demos/coder/.DS_Store +0 -0
- psychopy/demos/coder/experiment control/info_gamma.pickle +0 -0
- psychopy/demos/coder/iohub/.iohpid +1 -0
- psychopy/demos/coder/iohub/eyetracking/.iohpid +1 -0
- psychopy/demos/coder/iohub/wintab/.DS_Store +0 -0
- psychopy/demos/coder/stimuli/.DS_Store +0 -0
- psychopy/demos/coder/stimuli/radialGratingContracting.py +29 -0
- psychopy/experiment/_experiment.py.orig +1032 -0
- psychopy/experiment/components/.DS_Store +0 -0
- psychopy/experiment/components/_base.py +13 -4
- psychopy/experiment/components/_base.py.orig +823 -0
- psychopy/experiment/components/form/.DS_Store +0 -0
- psychopy/experiment/components/microphone/__init__.py +10 -1
- psychopy/experiment/components/microphone/__init__.py.orig +490 -0
- psychopy/experiment/components/polygon/__init__.py +21 -22
- psychopy/experiment/components/settings/__init__.py +13 -14
- psychopy/experiment/components/settings/__init__.py.orig +1337 -0
- psychopy/experiment/components/textbox/__init__.py.orig +310 -0
- psychopy/experiment/components/webcam/.DS_Store +0 -0
- psychopy/experiment/components/webcam/light/.DS_Store +0 -0
- psychopy/experiment/flow.py +10 -8
- psychopy/experiment/loops.py.orig +829 -0
- psychopy/experiment/params.py +8 -3
- psychopy/experiment/params.py.orig +408 -0
- psychopy/experiment/routine.py.orig +503 -0
- psychopy/experiment/routines/_base.py +15 -6
- psychopy/experiment/routines/counterbalance/__init__.py +1 -0
- psychopy/gui/qtgui.py +14 -7
- psychopy/gui/util.py +10 -14
- psychopy/gui/wxgui.py +10 -4
- psychopy/hardware/.DS_Store +0 -0
- psychopy/hardware/brainproducts.py.orig +680 -0
- psychopy/hardware/iolab.py.orig +238 -0
- psychopy/hardware/manager.py +1 -1
- psychopy/hardware/photodiode.py +59 -27
- psychopy/hardware/serialport.py +51 -0
- psychopy/hardware/speaker.py +4 -4
- psychopy/iohub/datastore/__init__.py.orig +443 -0
- psychopy/iohub/datastore/util.py.orig +692 -0
- psychopy/iohub/devices/mouse/darwin.py.orig +427 -0
- psychopy/iohub/devices/mouse/linux2.py.orig +198 -0
- psychopy/preferences/.DS_Store +0 -0
- psychopy/projects/pavlovia.py +10 -3
- psychopy/projects/pavlovia.py.orig +1295 -0
- psychopy/sound/backend_ptb.py +22 -5
- psychopy/sound/transcribe.py +24 -4
- psychopy/tests/.DS_Store +0 -0
- psychopy/tests/data/.DS_Store +0 -0
- psychopy/tests/data/TestCircle_fill_local.png +0 -0
- psychopy/tests/data/__test.png +0 -0
- psychopy/tests/data/aperture1_normHexbackground_local.png +0 -0
- psychopy/tests/data/aperture1_norm_local.png +0 -0
- psychopy/tests/data/aperture2_normHexbackground_local.png +0 -0
- psychopy/tests/data/beatandrcos_height_local.png +0 -0
- psychopy/tests/data/beatandrcos_normAddBlend_local.png +0 -0
- psychopy/tests/data/beatandrcos_normHexbackground_local.png +0 -0
- psychopy/tests/data/beatandrcos_norm_local.png +0 -0
- psychopy/tests/data/beatandrcos_stencil_local.png +0 -0
- psychopy/tests/data/blend_add_height_local.png +0 -0
- psychopy/tests/data/blend_add_normAddBlend_local.png +0 -0
- psychopy/tests/data/blend_add_normHexbackground_local.png +0 -0
- psychopy/tests/data/blend_add_normNoShade_local.png +0 -0
- psychopy/tests/data/blend_add_norm_local.png +0 -0
- psychopy/tests/data/blend_add_stencil_local.png +0 -0
- psychopy/tests/data/bufferimg_gabor_height_local.png +0 -0
- psychopy/tests/data/bufferimg_gabor_normAddBlend_local.png +0 -0
- psychopy/tests/data/bufferimg_gabor_normHexbackground_local.png +0 -0
- psychopy/tests/data/bufferimg_gabor_normNoShade_local.png +0 -0
- psychopy/tests/data/bufferimg_gabor_norm_local.png +0 -0
- psychopy/tests/data/bufferimg_gabor_stencil_local.png +0 -0
- psychopy/tests/data/circleHex_height_local.png +0 -0
- psychopy/tests/data/circleHex_normAddBlend_local.png +0 -0
- psychopy/tests/data/circleHex_normHexbackground_local.png +0 -0
- psychopy/tests/data/circleHex_normNoShade_local.png +0 -0
- psychopy/tests/data/circleHex_norm_local.png +0 -0
- psychopy/tests/data/circleHex_stencil_local.png +0 -0
- psychopy/tests/data/color_comparison_local.png +0 -0
- psychopy/tests/data/corrFullRandom_local.csv +16 -0
- psychopy/tests/data/corrFullRandom_local.tsv +6 -0
- psychopy/tests/data/correctScript/.DS_Store +0 -0
- psychopy/tests/data/dots_height_local.png +0 -0
- psychopy/tests/data/dots_normAddBlend_local.png +0 -0
- psychopy/tests/data/dots_normHexbackground_local.png +0 -0
- psychopy/tests/data/dots_normNoShade_local.png +0 -0
- psychopy/tests/data/dots_norm_local.png +0 -0
- psychopy/tests/data/dots_stencil_local.png +0 -0
- psychopy/tests/data/elarray1_height_local.png +0 -0
- psychopy/tests/data/elarray1_normAddBlend_local.png +0 -0
- psychopy/tests/data/elarray1_normHexbackground_local.png +0 -0
- psychopy/tests/data/elarray1_norm_local.png +0 -0
- psychopy/tests/data/elarray1_stencil_local.png +0 -0
- psychopy/tests/data/envelopeandrcos_height_local.png +0 -0
- psychopy/tests/data/envelopeandrcos_normAddBlend_local.png +0 -0
- psychopy/tests/data/envelopeandrcos_normHexbackground_local.png +0 -0
- psychopy/tests/data/envelopeandrcos_norm_local.png +0 -0
- psychopy/tests/data/envelopeandrcos_stencil_local.png +0 -0
- psychopy/tests/data/envelopepowerandrcos_height_local.png +0 -0
- psychopy/tests/data/envelopepowerandrcos_normAddBlend_local.png +0 -0
- psychopy/tests/data/envelopepowerandrcos_normHexbackground_local.png +0 -0
- psychopy/tests/data/envelopepowerandrcos_norm_local.png +0 -0
- psychopy/tests/data/envelopepowerandrcos_stencil_local.png +0 -0
- psychopy/tests/data/gabor1_height_local.png +0 -0
- psychopy/tests/data/gabor1_normAddBlend_local.png +0 -0
- psychopy/tests/data/gabor1_normHexbackground_local.png +0 -0
- psychopy/tests/data/gabor1_normNoShade_local.png +0 -0
- psychopy/tests/data/gabor1_norm_local.png +0 -0
- psychopy/tests/data/gabor1_stencil_local.png +0 -0
- psychopy/tests/data/greyscale_normHexbackground_local.png +0 -0
- psychopy/tests/data/imageAndGauss_height_local.png +0 -0
- psychopy/tests/data/imageAndGauss_normAddBlend_local.png +0 -0
- psychopy/tests/data/imageAndGauss_normHexbackground_local.png +0 -0
- psychopy/tests/data/imageAndGauss_normNoShade_local.png +0 -0
- psychopy/tests/data/imageAndGauss_norm_local.png +0 -0
- psychopy/tests/data/imageAndGauss_stencil_local.png +0 -0
- psychopy/tests/data/movFrame1_stencil_local.png +0 -0
- psychopy/tests/data/noiseAndRcos_height_local.png +0 -0
- psychopy/tests/data/noiseAndRcos_normAddBlend_local.png +0 -0
- psychopy/tests/data/noiseAndRcos_normHexbackground_local.png +0 -0
- psychopy/tests/data/noiseAndRcos_normNoShade_local.png +0 -0
- psychopy/tests/data/noiseAndRcos_norm_local.png +0 -0
- psychopy/tests/data/noiseAndRcos_stencil_local.png +0 -0
- psychopy/tests/data/noiseFiltersAndRcos_height_local.png +0 -0
- psychopy/tests/data/noiseFiltersAndRcos_normAddBlend_local.png +0 -0
- psychopy/tests/data/noiseFiltersAndRcos_normHexbackground_local.png +0 -0
- psychopy/tests/data/noiseFiltersAndRcos_normNoShade_local.png +0 -0
- psychopy/tests/data/noiseFiltersAndRcos_norm_local.png +0 -0
- psychopy/tests/data/noiseFiltersAndRcos_stencil_local.png +0 -0
- psychopy/tests/data/numpyImage_height_local.png +0 -0
- psychopy/tests/data/numpyImage_normAddBlend_local.png +0 -0
- psychopy/tests/data/numpyImage_normHexbackground_local.png +0 -0
- psychopy/tests/data/numpyImage_normNoShade_local.png +0 -0
- psychopy/tests/data/numpyImage_norm_local.png +0 -0
- psychopy/tests/data/numpyImage_stencil_local.png +0 -0
- psychopy/tests/data/shape2_1_normAddBlend_local.png +0 -0
- psychopy/tests/data/shape2_1_normHexbackground_local.png +0 -0
- psychopy/tests/data/shape2_1_normNoShade_local.png +0 -0
- psychopy/tests/data/shape2_1_norm_local.png +0 -0
- psychopy/tests/data/shape2_1_stencil_local.png +0 -0
- psychopy/tests/data/testLoopsBlocks.psyexp_local.py +328 -0
- psychopy/tests/data/text1_height_local.png +0 -0
- psychopy/tests/data/text1_normAddBlend_local.png +0 -0
- psychopy/tests/data/text1_normHexbackground_local.png +0 -0
- psychopy/tests/data/text1_norm_local.png +0 -0
- psychopy/tests/data/text1_stencil_local.png +0 -0
- psychopy/tests/data/text2_height.png +0 -0
- psychopy/tests/data/text2_normAddBlend.png +0 -0
- psychopy/tests/data/text2_normHexbackground.png +0 -0
- psychopy/tests/data/text2_stencil.png +0 -0
- psychopy/tests/data/wedge1_height_local.png +0 -0
- psychopy/tests/data/wedge1_normAddBlend_local.png +0 -0
- psychopy/tests/data/wedge1_normHexbackground_local.png +0 -0
- psychopy/tests/data/wedge1_normNoShade_local.png +0 -0
- psychopy/tests/data/wedge1_norm_local.png +0 -0
- psychopy/tests/data/wedge1_stencil_local.png +0 -0
- psychopy/tests/test_app/.DS_Store +0 -0
- psychopy/tests/test_app/test_builder/.DS_Store +0 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1206.csv +9 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1206.log +177 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1206.psydat +0 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1206.xlsx +0 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1324.csv +9 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1324.log +168 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1324.psydat +0 -0
- psychopy/tests/test_app/test_builder/data/_2021_ 5_03_1324.xlsx +0 -0
- psychopy/tests/test_data/.DS_Store +0 -0
- psychopy/tests/test_hardware/test_CRS_BitsSharp.py.orig +68 -0
- psychopy/tests/test_tools/test_arraytools.py +112 -0
- psychopy/tests/test_visual/test_image.py.orig +219 -0
- psychopy/tools/arraytools.py +47 -0
- psychopy/tools/versionchooser.py +1 -1
- psychopy/visual/backends/pygletbackend.py +26 -8
- psychopy/visual/basevisual.py.orig +1723 -0
- psychopy/visual/form.py.orig +1181 -0
- psychopy/visual/text.py.orig +752 -0
- psychopy/visual/textbox2/textbox2.py.orig +1315 -0
- psychopy/visual/window.py +13 -5
- psychopy/visual/windowwarp.py.orig +463 -0
- {psychopy-2024.2.1.dist-info → psychopy-2024.2.4.dist-info}/METADATA +9 -9
- {psychopy-2024.2.1.dist-info → psychopy-2024.2.4.dist-info}/RECORD +244 -78
- {psychopy-2024.2.1.dist-info → psychopy-2024.2.4.dist-info}/WHEEL +1 -1
- {psychopy-2024.2.1.dist-info → psychopy-2024.2.4.dist-info}/entry_points.txt +2 -0
- psychopy/app/locale/ar_001/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/cs_CZ/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/da_DK/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/de_DE/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/el_GR/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/en_NZ/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/en_US/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/es_CO/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/es_ES/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/es_US/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/et_EE/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/fa_IR/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/fi_FI/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/fr_FR/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/he_IL/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/hi_IN/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/hu_HU/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/it_IT/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/ja_JP/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/ko_KR/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/ms_MY/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/nl_NL/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/nn_NO/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/pl_PL/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/pt_PT/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/ro_RO/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/ru_RU/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/sv_SE/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/tr_TR/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/zh_CN/LC_MESSAGE/messages.mo +0 -0
- psychopy/app/locale/zh_TW/LC_MESSAGE/messages.mo +0 -0
- psychopy-2024.2.1.dist-info/licenses/AUTHORS.md +0 -138
- /psychopy/{app/locale/ar_001/LC_MESSAGE → demos/builder}/.DS_Store +0 -0
- /psychopy/{app/locale/es_ES/LC_MESSAGE → demos/builder/Experiments}/.DS_Store +0 -0
- /psychopy/{visual → demos/builder/Tools/gammaCalibration/data}/.DS_Store +0 -0
- {psychopy-2024.2.1.dist-info → psychopy-2024.2.4.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,692 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Part of the PsychoPy library
+# Copyright (C) 2012-2020 iSolver Software Solutions (C) 2021 Open Science Tools Ltd.
+# Distributed under the terms of the GNU General Public License (GPL).
+
+import numbers  # numbers.Integral is like (int, long) but supports Py3
+import os
+from collections import namedtuple
+import json
+import numpy
+
+from ..errors import print2err
+
+from pkg_resources import parse_version
+import tables
+
+if parse_version(tables.__version__) < parse_version('3'):
+    from tables import openFile as open_file
+
+    walk_groups = "walkGroups"
+    list_nodes = "listNodes"
+    get_node = "getNode"
+    read_where = "readWhere"
+else:
+    from tables import open_file
+
+    walk_groups = "walk_groups"
+    list_nodes = "list_nodes"
+    get_node = "get_node"
+    read_where = "read_where"
+
+_hubFiles = []
+
+
+def openHubFile(filepath, filename, mode):
+    """
+    Open an HDF5 DataStore file and register it so that it is closed even on interpreter crash.
+    """
+    global _hubFiles
+    hubFile = open_file(os.path.join(filepath, filename), mode)
+    _hubFiles.append(hubFile)
+    return hubFile
+
+
+def displayDataFileSelectionDialog(starting_dir=None):
+    """Shows a FileDialog and lets you select a .hdf5 file to open for
+    processing."""
+    from psychopy.gui.qtgui import fileOpenDlg
+
+    filePath = fileOpenDlg(tryFilePath=starting_dir,
+                           prompt="Select a ioHub HDF5 File",
+                           allowed='HDF5 Files (*.hdf5)')
+
+    if filePath is None:
+        return None
+
+    return filePath
+
+
+def displayEventTableSelectionDialog(title, list_label, list_values, default=u'Select'):
+    from psychopy import gui
+    if default not in list_values:
+        list_values.insert(0, default)
+    else:
+        list_values.remove(list_values)
+        list_values.insert(0, default)
+
+    selection_dict = {list_label: list_values}
+    dlg_info = dict(selection_dict)
+    infoDlg = gui.DlgFromDict(dictionary=dlg_info, title=title)
+    if not infoDlg.OK:
+        return None
+
+    while list(dlg_info.values())[0] == default and infoDlg.OK:
+        dlg_info = dict(selection_dict)
+        infoDlg = gui.DlgFromDict(dictionary=dlg_info, title=title)
+
+    if not infoDlg.OK:
+        return None
+
+    return list(dlg_info.values())[0]
+
+
+def saveEventReport(hdf5FilePath="", eventType="", eventFields=[], trialStartMessage=None, trialStopMessage=None,
+                    timeMargins=(0.0, 0.0)):
+    """
+    Save a tab delimited event report, optionally splitting events into (trial) groups.
+
+    :param hdf5FilePath:
+    :param eventType:
+    :param eventFields:
+    :param trialStartMessage:
+    :param trialStopMessage:
+    :param timeMargins:
+    :return:
+    """
+    # Select the hdf5 file to process.
+    if not hdf5FilePath:
+        selectedFilePath = displayDataFileSelectionDialog(os.getcwd())
+        if selectedFilePath:
+            hdf5FilePath = selectedFilePath[0]
+    if not hdf5FilePath:
+        print("Warning: saveEventReport requires hdf5FilePath. No report saved.")
+        return None
+
+    dpath, dfile = os.path.split(hdf5FilePath)
+    datafile = ExperimentDataAccessUtility(dpath, dfile)
+
+    if not eventType:
+        # Get a dict of all event types -> DataStore table info for the selected DataStore file.
+        eventTableMappings = datafile.getEventMappingInformation()
+        # Get event tables that have data...
+        events_with_data = datafile.getEventsByType()
+
+        # Select which event table to output
+        eventNameList = []
+        for event_id in list(events_with_data.keys()):
+            eventNameList.append(eventTableMappings[event_id].class_name.decode('utf-8'))
+        eventType = displayEventTableSelectionDialog("Select Event Type to Save", "Event Type:", eventNameList)
+        if eventType is None:
+            print("Warning: saveEventReport requires eventType. No report saved.")
+            datafile.close()
+            return None
+
+    trial_times = []
+    if trialStartMessage and trialStopMessage:
+        # Create a table of trial_index, trial_start_time, trial_end_time for each trial by
+        # getting the time of 'TRIAL_START' and 'TRIAL_END' experiment messages.
+        mgs_table = datafile.getEventTable('MessageEvent')
+        trial_start_msgs = mgs_table.where('text == b"%s"' % trialStartMessage)
+        for mix, msg in enumerate(trial_start_msgs):
+            trial_times.append([mix + 1, msg['time'] - timeMargins[0], 0])
+        trial_end_msgs = mgs_table.where('text == b"%s"' % trialStopMessage)
+        for mix, msg in enumerate(trial_end_msgs):
+            trial_times[mix][2] = msg['time'] + timeMargins[1]
+        del mgs_table
+    elif trialStartMessage is None and trialStopMessage is None:
+        # do not split events into trial groupings
+        pass
+    else:
+        print("Warning: saveEventReport requires trialStartMessage and trialStopMessage to be strings or both None."
+              " No report saved.")
+        datafile.close()
+        return None
+
+    # Get the event table to generate report for
+    event_table = datafile.getEventTable(eventType)
+
+    if not eventFields:
+        # If no event fields were specified, report (almost) all event fields.
+        eventFields = [c for c in event_table.colnames if c not in ['experiment_id', 'session_id', 'device_id',
+                                                                    'type', 'filter_id']]
+
+    if eventType == 'MessageEvent':
+        # Sort experiment messages by time since they may not be ordered chronologically.
+        event_table = event_table.read()
+        event_table.sort(order='time')
+
+    ecount = 0
+    # Open a file to save the tab delimited output to.
+    output_file_name = os.path.join(dpath, "%s.%s.txt" % (dfile[:-5], eventType))
+    with open(output_file_name, 'w') as output_file:
+        # Save header row to file
+        if trial_times:
+            column_names = ['TRIAL_INDEX', trialStartMessage, trialStopMessage] + eventFields
+        else:
+            column_names = eventFields
+
+        output_file.write('\t'.join(column_names))
+        output_file.write('\n')
+
+        event_groupings = []
+        if trial_times:
+            # Split events into trials
+            for tindex, tstart, tstop in trial_times:
+                if eventType == 'MessageEvent':
+                    event_groupings.append(event_table[(event_table['time'] >= tstart) & (event_table['time']
+                                                                                          <= tstop)])
+                else:
+                    event_groupings.append(event_table.where("(time >= %f) & (time <= %f)" % (tstart, tstop)))
+        else:
+            # Report events without splitting them into trials
+            if eventType == 'MessageEvent':
+                event_groupings.append(event_table)
+            else:
+                event_groupings.append(event_table.iterrows())
+
+        # Save a row for each event within the trial period
+        for tid, trial_events in enumerate(event_groupings):
+            for event in trial_events:
+                event_data = []
+                for c in eventFields:
+                    cv = event[c]
+                    if type(cv) == numpy.bytes_:
+                        cv = event[c].decode('utf-8')
+                    if type(cv) == str and len(cv) == 0:
+                        cv = '.'
+                    event_data.append(str(cv))
+                if trial_times:
+                    tindex, tstart, tstop = trial_times[tid]
+                    output_file.write('\t'.join([str(tindex), str(tstart), str(tstop)] + event_data))
+                else:
+                    output_file.write('\t'.join(event_data))
+                output_file.write('\n')
+                ecount += 1
+
+    # Done report creation, close input file
+    datafile.close()
+    return output_file_name, ecount
+
+
+########### Experiment / Experiment Session Based Data Access #################
+
+
+class ExperimentDataAccessUtility:
+    """The ExperimentDataAccessUtility provides a simple, high level, way to
+    access data saved in an ioHub DataStore HDF5 file. Data access is done by
+    providing information at an experiment and session level, as well as
+    specifying the ioHub Event types you want to retrieve data for.
+
+    An instance of the ExperimentDataAccessUtility class is created by providing
+    the location and name of the file to read, as well as any session code
+    filtering you want applied to the retrieved datasets.
+
+    Args:
+        hdfFilePath (str): The path of the directory the DataStore HDF5 file is in.
+
+        hdfFileName (str): The name of the DataStore HDF5 file.
+
+        experimentCode (str): If multi-experiment support is enabled for the DataStore file, this argument can be used to specify what experiment data to load based on the experiment_code given. NOTE: Multi-experiment data file support is not well tested and should not be used at this point.
+
+        sessionCodes (str or list): The experiment session code to filter data by. If a list of codes is given, then all codes in the list will be used.
+
+    Returns:
+        object: the created instance of the ExperimentDataAccessUtility, ready to get your data!
+
+    """
+
+    def __init__(self, hdfFilePath, hdfFileName, experimentCode=None, sessionCodes=[], mode='r'):
+        """An instance of the ExperimentDataAccessUtility class is created by
+        providing the location and name of the file to read, as well as any
+        session code filtering you want applied to the retrieved datasets.
+
+        Args:
+            hdfFilePath (str): The path of the directory the DataStore HDF5 file is in.
+
+            hdfFileName (str): The name of the DataStore HDF5 file.
+
+            experimentCode (str): If multi-experiment support is enabled for the DataStore file, this argument can be used to specify what experiment data to load based on the experiment_code given. NOTE: Multi-experiment data file support is not well tested and should not be used at this point.
+
+            sessionCodes (str or list): The experiment session code to filter data by. If a list of codes is given, then all codes in the list will be used.
+
+        Returns:
+            object: the created instance of the ExperimentDataAccessUtility, ready to get your data!
+
+        """
+        self.hdfFilePath = hdfFilePath
+        self.hdfFileName = hdfFileName
+        self.mode = mode
+        self.hdfFile = None
+
+        self._experimentCode = experimentCode
+        self._sessionCodes = sessionCodes
+        self._lastWhereClause = None
+
+        try:
+            self.hdfFile = openHubFile(hdfFilePath, hdfFileName, mode)
+        except Exception as e:
+            raise ExperimentDataAccessException(e)
+
+        self.getExperimentMetaData()
+
+    def printTableStructure(self, tableName):
+        """Print to stdout the current structure and content statistics of the
+        specified DataStore table. To print out the complete structure of the
+        DataStore file, including the name of all available tables, see the
+        printHubFileStructure method.
+
+        Args:
+            tableName (str): The DataStore table name to print metadata information out for.
+
+        """
+        if self.hdfFile:
+            hubFile = self.hdfFile
+            for group in getattr(hubFile, walk_groups)("/"):
+                for table in getattr(hubFile, list_nodes)(group, classname='Table'):
+                    if table.name == tableName:
+                        print('------------------')
+                        print('Path:', table)
+                        print('Table name:', table.name)
+                        print('Number of rows in table:', table.nrows)
+                        print('Number of cols in table:', len(table.colnames))
+                        print('Attribute name := type, shape:')
+                        for name in table.colnames:
+                            print('\t', name, ':= %s, %s' % (table.coldtypes[name], table.coldtypes[name].shape))
+                        print('------------------')
+                        return
+
+    def printHubFileStructure(self):
+        """Print to stdout the current global structure of the loaded DataStore
+        File."""
+        if self.hdfFile:
+            print(self.hdfFile)
+
+    def getExperimentMetaData(self):
+        """Returns the the metadata for the experiment the datStore file is
+        for.
+
+        **Docstr TBC.**
+
+        """
+        if self.hdfFile:
+            expcols = self.hdfFile.root.data_collection.experiment_meta_data.colnames
+            if 'sessions' not in expcols:
+                expcols.append('sessions')
+            ExperimentMetaDataInstance = namedtuple(
+                'ExperimentMetaDataInstance', expcols)
+            experiments = []
+            for e in self.hdfFile.root.data_collection.experiment_meta_data:
+                self._experimentID = e['experiment_id']
+                a_exp = list(e[:])
+                a_exp.append(self.getSessionMetaData())
+                experiments.append(ExperimentMetaDataInstance(*a_exp))
+            return experiments
+
+    def getSessionMetaData(self, sessions=None):
+        """
+        Returns the the metadata associated with the experiment session codes in use.
+
+        **Docstr TBC.**

+        """
+        if self.hdfFile:
+            if sessions is None:
+                sessions = []
+
+            sessionCodes = self._sessionCodes
+            sesscols = self.hdfFile.root.data_collection.session_meta_data.colnames
+            SessionMetaDataInstance = namedtuple('SessionMetaDataInstance', sesscols)
+            for r in self.hdfFile.root.data_collection.session_meta_data:
+                if (len(sessionCodes) == 0 or r['code'] in sessionCodes) and r['experiment_id'] == self._experimentID:
+                    rcpy = list(r[:])
+                    rcpy[-1] = json.loads(rcpy[-1])
+                    sessions.append(SessionMetaDataInstance(*rcpy))
+            return sessions
+
+    def getTableForPath(self, path):
+        """
+        Given a valid table path within the DataStore file, return the accociated table.
+        """
+        getattr(self.hdfFile, get_node)(path)
+
+    def getEventTable(self, event_type):
+        """
+        Returns the DataStore table that contains events of the specified type.
+
+        **Docstr TBC.**
+
+        """
+        if self.hdfFile:
+            klassTables = self.hdfFile.root.class_table_mapping
+            event_column = None
+            event_value = None
+
+            if isinstance(event_type, str):
+                if event_type.find('Event') >= 0:
+                    event_column = 'class_name'
+                    event_value = event_type
+                else:
+                    event_value = ''
+                    tokens = event_type.split('_')
+                    for t in tokens:
+                        event_value += t[0].upper() + t[1:].lower()
+                    event_value = event_type + 'Event'
+            elif isinstance(event_type, numbers.Integral):
+                event_column = 'class_id'
+                event_value = event_type
+            else:
+<<<<<<< HEAD
+                print2err('getEventTable error: event_type arguement must be a string or and int')
+=======
+                print2err(
+                    'getEventTable error: event_type argument must be a string or and int')
+>>>>>>> release
+                return None
+
+            result = []
+            where_cls = '(%s == b"%s") & (class_type_id == 1)' % (event_column, event_value)
+            for row in klassTables.where(where_cls):
+                result.append(row.fetch_all_fields())
+
+            if len(result) == 0:
+                return None
+
+            if len(result) != 1:
+                print2err('event_type_id passed to getEventAttribute can only return one row from CLASS_MAPPINGS.')
+                return None
+            tablePathString = result[0][3]
+            if isinstance(tablePathString, bytes):
+                tablePathString = tablePathString.decode('utf-8')
+            return getattr(self.hdfFile, get_node)(tablePathString)
+        return None
+
+    def getEventMappingInformation(self):
+        """Returns details on how ioHub Event Types are mapped to tables within
+        the given DataStore file."""
+        if self.hdfFile:
+            eventMappings = dict()
+            class_2_table = self.hdfFile.root.class_table_mapping
+            EventTableMapping = namedtuple(
+                'EventTableMapping',
+                self.hdfFile.root.class_table_mapping.colnames)
+            for row in class_2_table[:]:
+                eventMappings[row['class_id']] = EventTableMapping(*row)
+            return eventMappings
+        return None
+
+    def getEventsByType(self, condition_str=None):
+        """Returns a dict of all event tables within the DataStore file that
+        have at least one event instance saved.
+
+        Keys are Event Type constants, as specified by
+        iohub.EventConstants. Each value is a row iterator for events of
+        that type.
+
+        """
+        eventTableMappings = self.getEventMappingInformation()
+        if eventTableMappings:
+            events_by_type = dict()
+            getNode = getattr(self.hdfFile, get_node)
+            for event_type_id, event_mapping_info in eventTableMappings.items():
+                try:
+                    cond = '(type == %d)' % (event_type_id)
+                    if condition_str:
+                        cond += ' & ' + condition_str
+                    et_path = event_mapping_info.table_path
+                    if isinstance(et_path, bytes):
+                        et_path = et_path.decode('utf-8')
+                    events_by_type[event_type_id] = next(getNode(et_path).where(cond))
+                except StopIteration:
+                    pass
+            return events_by_type
+        return None
+
+    def getConditionVariablesTable(self):
+        """
+        **Docstr TBC.**
+        """
+        cv_group = self.hdfFile.root.data_collection.condition_variables
+        ecv = 'EXP_CV_%d' % (self._experimentID,)
+        if ecv in cv_group._v_leaves:
+            return cv_group._v_leaves[ecv]
+        return None
+
+    def getConditionVariableNames(self):
+        """
+        **Docstr TBC.**
+        """
+        cv_group = self.hdfFile.root.data_collection.condition_variables
+        ecv = "EXP_CV_%d" % (self._experimentID,)
+        if ecv in cv_group._v_leaves:
+            ecvTable = cv_group._v_leaves[ecv]
+            return ecvTable.colnames
+        return None
+
+    def getConditionVariables(self, filter=None):
+        """
+        **Docstr TBC.**
+        """
+        if filter is None:
+            session_ids = []
+            for s in self.getExperimentMetaData()[0].sessions:
+                session_ids.append(s.session_id)
+            filter = dict(SESSION_ID=(' in ', session_ids))
+
+        ConditionSetInstance = None
+
+        for conditionVarName, conditionVarComparitor in filter.items():
+            avComparison, value = conditionVarComparitor
+
+            cv_group = self.hdfFile.root.data_collection.condition_variables
+            cvrows = []
+            ecv = "EXP_CV_%d" % (self._experimentID,)
+            if ecv in cv_group._v_leaves:
+                ecvTable = cv_group._v_leaves[ecv]
+
+                if ConditionSetInstance is None:
+                    colnam = ecvTable.colnames
+                    ConditionSetInstance = namedtuple('ConditionSetInstance', colnam)
+
+                cvrows.extend(
+                    [
+                        ConditionSetInstance(
+                            *
+                            r[:]) for r in ecvTable if all(
+                            [
+                                eval(
+                                    '{0} {1} {2}'.format(
+                                        r[conditionVarName],
+                                        conditionVarComparitor[0],
+                                        conditionVarComparitor[1])) for conditionVarName,
+                                conditionVarComparitor in filter.items()])])
+        return cvrows
+
+    def getValuesForVariables(self, cv, value, cvNames):
+        """
+        **Docstr TBC.**
+        """
+        if isinstance(value, (list, tuple)):
+            resolvedValues = []
+            for v in value:
+                if isinstance(value, str) and value.startswith('@') and value.endswith('@'):
+                    value = value[1:-1]
+                    if value in cvNames:
+                        resolvedValues.append(getattr(cv, v))
+                    else:
+                        raise ExperimentDataAccessException('getEventAttributeValues: {0} is not a valid attribute '
+                                                            'name in {1}'.format(v, cvNames))
+                elif isinstance(value, str):
+                    resolvedValues.append(value)
+            return resolvedValues
+        elif isinstance(value, str) and value.startswith('@') and value.endswith('@'):
+            value = value[1:-1]
+            if value in cvNames:
+                return getattr(cv, value)
+            else:
+                raise ExperimentDataAccessException('getEventAttributeValues: {0} is not a valid attribute name'
+                                                    ' in {1}'.format(value, cvNames))
+        else:
+            raise ExperimentDataAccessException('Unhandled value type !: {0} is not a valid type for value '
+                                                '{1}'.format(type(value), value))
+
+    def getEventAttributeValues(self, event_type_id, event_attribute_names, filter_id=None,
+                                conditionVariablesFilter=None, startConditions=None, endConditions=None):
+        """
+        **Docstr TBC.**
+
+        Args:
+            event_type_id
+            event_attribute_names
+            filter_id
+            conditionVariablesFilter
+            startConditions
+            endConditions
+
+        Returns:
+            Values for the specified event type and event attribute columns which match the provided experiment
+            condition variable filter, starting condition filer, and ending condition filter criteria.
+        """
+        if self.hdfFile:
+            klassTables = self.hdfFile.root.class_table_mapping
+
+            deviceEventTable = None
+
+            result = [row.fetch_all_fields() for row in klassTables.where('(class_id == %d) &'
+                                                                          ' (class_type_id == 1)' % (event_type_id))]
+            if len(result) != 1:
+                raise ExperimentDataAccessException("event_type_id returned > 1 row from CLASS_MAPPINGS.")
+            tablePathString = result[0][3]
+            if isinstance(tablePathString, bytes):
+                tablePathString = tablePathString.decode('utf-8')
+            deviceEventTable = getattr(self.hdfFile, get_node)(tablePathString)
+
+            for ename in event_attribute_names:
+                if ename not in deviceEventTable.colnames:
+                    raise ExperimentDataAccessException('getEventAttribute: %s does not have a column named %s' %
+                                                        (deviceEventTable.title, event_attribute_names))
+
+            resultSetList = []
+
+            csier = list(event_attribute_names)
+            csier.append('query_string')
+            csier.append('condition_set')
+            EventAttributeResults = namedtuple('EventAttributeResults', csier)
+
+            if deviceEventTable is not None:
+                if not isinstance(event_attribute_names, (list, tuple)):
+                    event_attribute_names = [event_attribute_names, ]
+
+                filteredConditionVariableList = None
+                if conditionVariablesFilter is None:
+                    filteredConditionVariableList = self.getConditionVariables()
+                else:
+                    filteredConditionVariableList = self.getConditionVariables(conditionVariablesFilter)
+
+                cvNames = self.getConditionVariableNames()
+
+                # no further where clause building needed; get reseults and
+                # return
+                if startConditions is None and endConditions is None:
+                    for cv in filteredConditionVariableList:
+
+                        wclause = '( experiment_id == {0} ) & ( SESSION_ID == {1} )'.format(self._experimentID,
+                                                                                            cv.SESSION_ID)
+
+                        wclause += ' & ( type == {0} ) '.format(event_type_id)
+
+                        if filter_id is not None:
+                            wclause += '& ( filter_id == {0} ) '.format(filter_id)
+
+                        resultSetList.append([])
+
+                        for ename in event_attribute_names:
+                            resultSetList[-1].append(getattr(deviceEventTable, read_where)(wclause, field=ename))
+                        resultSetList[-1].append(wclause)
+                        resultSetList[-1].append(cv)
+
+                        eventAttributeResults = EventAttributeResults(*resultSetList[-1])
+                        resultSetList[-1] = eventAttributeResults
+
+                    return resultSetList
+
+                # start or end conditions exist....
+                for cv in filteredConditionVariableList:
+                    resultSetList.append([])
+
+                    wclause = '( experiment_id == {0} ) & ( session_id == {1} )'.format(self._experimentID,
+                                                                                        cv.SESSION_ID)
+
+                    wclause += ' & ( type == {0} ) '.format(event_type_id)
+
+                    if filter_id is not None:
+                        wclause += '& ( filter_id == {0} ) '.format(filter_id)
+
+                    # start Conditions need to be added to where clause
+                    if startConditions is not None:
+                        wclause += '& ('
+                        for conditionAttributeName, conditionAttributeComparitor in startConditions.items():
+                            avComparison, value = conditionAttributeComparitor
+                            value = self.getValuesForVariables(cv, value, cvNames)
+                            wclause += ' ( {0} {1} {2} ) & '.format(conditionAttributeName, avComparison, value)
+                        wclause = wclause[:-3]
+                        wclause += ' ) '
+
+                    # end Conditions need to be added to where clause
+                    if endConditions is not None:
+                        wclause += ' & ('
+                        for conditionAttributeName, conditionAttributeComparitor in endConditions.items():
+                            avComparison, value = conditionAttributeComparitor
+                            value = self.getValuesForVariables(cv, value, cvNames)
+                            wclause += ' ( {0} {1} {2} ) & '.format(conditionAttributeName, avComparison, value)
+                        wclause = wclause[:-3]
+                        wclause += ' ) '
+
+                    for ename in event_attribute_names:
+                        resultSetList[-1].append(getattr(deviceEventTable, read_where)(wclause, field=ename))
+                    resultSetList[-1].append(wclause)
+                    resultSetList[-1].append(cv)
+
+                    eventAttributeResults = EventAttributeResults(*resultSetList[-1])
+                    resultSetList[-1] = eventAttributeResults
+
+                return resultSetList
+
+            return None
+
+    def getEventIterator(self, event_type):
+        """
+        **Docstr TBC.**
+
+        Args:
+            event_type
+
+        Returns:
+            (iterator): An iterator providing access to each matching event as a numpy recarray.
+        """
+        return self.getEventTable(event_type).iterrows()
+
+    def close(self):
+        """Close the ExperimentDataAccessUtility and associated DataStore
+        File."""
+        global _hubFiles
+        if self.hdfFile in _hubFiles:
+            _hubFiles.remove(self.hdfFile)
+        self.hdfFile.close()
+
+        self.experimentCodes = None
+        self.hdfFilePath = None
+        self.hdfFileName = None
+        self.mode = None
+        self.hdfFile = None
+
+    def __del__(self):
+        try:
+            self.close()
+        except Exception:
+            pass
+
+
+class ExperimentDataAccessException(Exception):
+    pass