datastock 0.0.47__tar.gz → 0.0.50__tar.gz
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- datastock-0.0.50/.github/workflows/python-publish.yml +35 -0
- datastock-0.0.50/.github/workflows/python-testing-matrix.yml +41 -0
- datastock-0.0.50/.gitignore +135 -0
- datastock-0.0.50/CLASSIFIERS.txt +10 -0
- datastock-0.0.47/LICENSE → datastock-0.0.50/LICENSE.txt +1 -1
- {datastock-0.0.47 → datastock-0.0.50}/MANIFEST.in +1 -0
- {datastock-0.0.47/datastock.egg-info → datastock-0.0.50}/PKG-INFO +55 -53
- {datastock-0.0.47 → datastock-0.0.50}/README.md +34 -34
- datastock-0.0.50/README_figures/DataStock_Obj.png +0 -0
- datastock-0.0.50/README_figures/DataStock_refdata.png +0 -0
- datastock-0.0.50/README_figures/DirectVisualization_3d.png +0 -0
- datastock-0.0.50/datastock/__init__.py +31 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class2.py +9 -5
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_generic_check.py +3 -52
- datastock-0.0.50/datastock/_version.py +21 -0
- datastock-0.0.50/datastock/tests/prepublish.py +3 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/version.py +1 -1
- {datastock-0.0.47 → datastock-0.0.50/datastock.egg-info}/PKG-INFO +55 -53
- {datastock-0.0.47 → datastock-0.0.50}/datastock.egg-info/SOURCES.txt +16 -4
- datastock-0.0.50/datastock.egg-info/entry_points.txt +2 -0
- datastock-0.0.50/datastock.egg-info/requires.txt +13 -0
- datastock-0.0.50/datastock.egg-info/top_level.txt +1 -0
- datastock-0.0.50/pyproject.toml +69 -0
- datastock-0.0.50/scripts/__init__.py +0 -0
- datastock-0.0.50/scripts/_bash_version.py +77 -0
- datastock-0.0.50/scripts/_dparser.py +109 -0
- datastock-0.0.50/scripts/main.py +147 -0
- datastock-0.0.50/setup.cfg +4 -0
- datastock-0.0.47/datastock/__init__.py +0 -10
- datastock-0.0.47/datastock.egg-info/requires.txt +0 -12
- datastock-0.0.47/datastock.egg-info/top_level.txt +0 -2
- datastock-0.0.47/pyproject.toml +0 -4
- datastock-0.0.47/setup.cfg +0 -7
- datastock-0.0.47/setup.py +0 -174
- {datastock-0.0.47 → datastock-0.0.50}/_updateversion.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_DataCollection_utils.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class0.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_binning.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_check.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_color_touch.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_compute.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_domain.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_interpolate.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_show.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class1_uniformize.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class2_interactivity.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_class3.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_direct_calls.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_export_dataframe.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_find_plateau.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_generic_utils.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_generic_utils_plot.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_BvsA_as_distribution.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_BvsA_as_distribution_check.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_as_array.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_as_array_1d.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_as_array_234d.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_as_mobile_lines.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_as_profile1d.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_correlations.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_old_backup.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_plot_text.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/_saveload.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/tests/__init__.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/tests/output/__init__.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock/tests/test_01_DataStock.py +0 -0
- {datastock-0.0.47 → datastock-0.0.50}/datastock.egg-info/dependency_links.txt +0 -0

.github/workflows/python-publish.yml (new file)
@@ -0,0 +1,35 @@
+# This workflow will upload a Python Package using Twine when a release is created
+# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
+
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+name: Upload Python Package
+
+on:
+  push:
+    tags:
+      - '*'
+    branches:
+      - main
+  release:
+    types: [created]
+
+jobs:
+  pypi:
+    name: Publish sdist to Pypi
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v4
+    - uses: astral-sh/setup-uv@v5
+      with:
+        python-version: '3.11'
+    - run: uv build
+    # Check that basic features work and we didn't miss to include crucial files
+    - name: import test (wheel)
+      run: uv run --isolated --no-project -p 3.11 --with dist/*.whl datastock/tests/prepublish.py
+    - name: import test (source distribution)
+      run: uv run --isolated --no-project -p 3.11 --with dist/*.tar.gz datastock/tests/prepublish.py
+    - run: uv publish -t ${{ secrets.PYPI_API_TOKEN }}
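Both import tests above run `datastock/tests/prepublish.py` against the freshly built wheel and sdist. That script is listed as a new 3-line file but its content is not included in this diff; a minimal sketch of what such a smoke test might contain (an assumption, not the actual file) is:

```python
# Hypothetical sketch of datastock/tests/prepublish.py (actual content not shown in this diff):
# import the freshly built package and print its version as a basic smoke test.
import datastock

print(datastock.__version__)
```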

.github/workflows/python-testing-matrix.yml (new file)
@@ -0,0 +1,41 @@
+# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Testing matrix
+
+on:
+  push:
+    branches: [ devel ]
+  pull_request:
+    branches: [ devel ]
+
+jobs:
+  build:
+
+    runs-on: ${{ matrix.os }}
+
+    strategy:
+      fail-fast: true
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+        python-version: ["3.8", "3.9", "3.10", "3.11"]
+
+    steps:
+
+    # git checkout
+    - uses: actions/checkout@v4
+
+    # Install uv
+    - name: Install uv
+      uses: astral-sh/setup-uv@v5
+      with:
+        python-version: ${{ matrix.python-version }}
+
+    # Install library
+    - name: Install the project
+      run: uv sync --all-extras --dev
+
+    # Run tests
+    - name: Run tests
+      # For example, using `pytest`
+      run: uv run pytest datastock/tests
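The matrix job runs the suite with `uv run pytest datastock/tests` on each OS/Python combination. The same suite can also be launched programmatically; this is a local convenience sketch, not part of the package:

```python
# Local equivalent of the CI test step (assumes pytest is installed in the current environment).
import sys

import pytest

# Run the datastock test suite verbosely and propagate pytest's exit code.
sys.exit(pytest.main(["datastock/tests", "-v"]))
```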

.gitignore (new file)
@@ -0,0 +1,135 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# single sourcing verion file
+datastock/_version.py
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# vim swap file
+*.swp

CLASSIFIERS.txt (new file)
@@ -0,0 +1,10 @@
+Development Status :: 5 - Production/Stable
+Intended Audience :: Science/Research
+Programming Language :: Python :: 3
+Programming Language :: Python :: 3.6
+Programming Language :: Python :: 3.7
+Programming Language :: Python :: 3.8
+Programming Language :: Python :: 3.9
+Programming Language :: Python :: 3.10
+Programming Language :: Python :: 3.11
+Natural Language :: English

PKG-INFO
@@ -1,34 +1,36 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: datastock
-Version: 0.0.47
-Summary:
-
-
-Author-email: didier.vezinet@gmail.com
+Version: 0.0.50
+Summary: Generic handler for multiple heterogenous numpy arrays and subclasses
+Author-email: Didier VEZINET <didier.vezinet@gmail.com>
+Maintainer-email: Didier VEZINET <didier.vezinet@gmail.com>
 License: MIT
-
-
+Project-URL: Homepage, https://github.com/ToFuProject/datastock
+Project-URL: Issues, https://github.com/ToFuProject/datastock/issues
+Keywords: data,analysis,interactive,heterogeneous arrays,numpy,Collection
+Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Science/Research
-Classifier:
-Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Natural Language :: English
-Requires-Python: >=3.
+Requires-Python: >=3.8
 Description-Content-Type: text/markdown
-License-File: LICENSE
+License-File: LICENSE.txt
 Requires-Dist: numpy
 Requires-Dist: scipy
 Requires-Dist: matplotlib
+Requires-Dist: PyQt5; platform_system != "Windows"
 Requires-Dist: astropy
-Provides-Extra:
-Requires-Dist:
-
-Requires-Dist:
-
-Requires-Dist: sphinx-gallery; extra == "dev"
-Requires-Dist: sphinx_bootstrap_theme; extra == "dev"
+Provides-Extra: linting
+Requires-Dist: ruff; extra == "linting"
+Provides-Extra: formatting
+Requires-Dist: ruff; extra == "formatting"
+Dynamic: license-file
 
 [](https://anaconda.org/conda-forge/datastock)
 [](https://anaconda.org/conda-forge/datastock)
@@ -73,15 +75,15 @@ Examples:
 Straightforward array visualization:
 ------------------------------------
 
-
+``
 import datastock as ds
 
 # any 1d, 2d or 3d array
-aa = np.
+aa = np.random((100, 100, 100))
 
 # plot interactive figure using shortcut to method
 dax = ds.plot_as_array(aa)
-
+``
 
 Now do **shift + left clic** on any axes, the rest of the interactive commands are automatically printed in your python console
 
@@ -107,7 +109,7 @@ Thanks to dref, the class knows the relationaships between all numpy arrays.
 In particular it knows which arrays share the same references / dimensions
 
 
-```
+```python
 import numpy as np
 import datastock as ds
 
@@ -128,24 +130,24 @@ lprof = [(1 + np.cos(t)[:, None]) * x[None, :] for t in lt]
 # Populate DataStock
 
 # instanciate
-
+coll = ds.DataStock()
 
 # add references (i.e.: store size of each dimension under a unique key)
-
-
+coll.add_ref(key='nc', size=nc)
+coll.add_ref(key='nx', size=nx)
 for ii, nt in enumerate(lnt):
-
+    coll.add_ref(key=f'nt{ii}', size=nt)
 
 # add data dependening on these references
 # you can, optionally, specify units, physical dimensionality (ex: distance, time...), quantity (ex: radius, height, ...) and name (to your liking)
 
-
+coll.add_data(key='x', data=x, dimension='distance', quant='radius', units='m', ref='nx')
 for ii, nt in enumerate(lnt):
-
-
+    coll.add_data(key=f't{ii}', data=lt[ii], dimension='time', units='s', ref=f'nt{ii}')
+    coll.add_data(key=f'prof{ii}', data=lprof[ii], dimension='velocity', units='m/s', ref=(f'nt{ii}', 'x'))
 
 # print in the console the content of st
-
+coll
 ```
 
 <p align="center">
@@ -156,22 +158,22 @@ You can see that DataStock stores the relationships between each array and each
 Specifying explicitly the references is only necessary if there is an ambiguity (i.e.: several references have the same size, like nx and nt2 in our case)
 
 
-
+``
 # plot any array interactively
-dax =
-dax =
-dax =
-dax =
-
+dax = coll.plot_as_array('x')
+dax = coll.plot_as_array('t0')
+dax = coll.plot_as_array('prof0')
+dax = coll.plot_as_array('prof0', keyX='t0', keyY='x', aspect='auto')
+``
 
 You can then decide to store any object category
 Let's create a 'campaign' category to store the characteristics of each measurements campaign
 and let's add a 'campaign' parameter to each profile data
 
-
+``
 # add arbitrary object category as sub-dict of self.dobj
 for ii in range(nc):
-
+    coll.add_obj(
         which='campaign',
         key=f'c{ii}',
         start_date=f'{ii}.04.2022',
@@ -182,16 +184,16 @@ for ii in range(nc):
     )
 
 # create new 'campaign' parameter for data arrays
-
+coll.add_param('campaign', which='data')
 
 # tag each data with its campaign
 for ii in range(nc):
-
-
+    coll.set_param(which='data', key=f't{ii}', param='campaign', value=f'c{ii}')
+    coll.set_param(which='data', key=f'prof{ii}', param='campaign', value=f'c{ii}')
 
 # print in the console the content of st
-
-
+coll
+``
 
 <p align="center">
 <img align="middle" src="https://github.com/ToFuProject/datastock/blob/devel/README_figures/DataStock_Obj.png" width="600" alt="Direct 3d array visualization"/>
@@ -200,31 +202,31 @@ st
 DataStock also provides built-in object selection method to allow return all
 objects matching a criterion, as lits of int indices, bool indices or keys.
 
-
-In [9]:
+``
+In [9]: coll.select(which='campaign', index=2, returnas=int)
 Out[9]: array([2])
 
 # list of 2 => return all matches inside the interval
-In [10]:
+In [10]: coll.select(which='campaign', index=[2, 4], returnas=int)
 Out[10]: array([2, 3, 4])
 
 # tuple of 2 => return all matches outside the interval
-In [11]:
+In [11]: coll.select(which='campaign', index=(2, 4), returnas=int)
 Out[11]: array([0, 1])
 
 # return as keys
-In [12]:
+In [12]: coll.select(which='campaign', index=(2, 4), returnas=str)
 Out[12]: array(['c0', 'c1'], dtype='<U2')
 
 # return as bool indices
-In [13]:
+In [13]: coll.select(which='campaign', index=(2, 4), returnas=bool)
 Out[13]: array([ True, True, False, False, False])
 
 # You can combine as many constraints as needed
-In [17]:
+In [17]: coll.select(which='campaign', index=[2, 4], operator='Barnaby', returnas=str)
 Out[17]: array(['c3', 'c4'], dtype='<U2')
 
-
+``
 
 You can also decide to sub-class DataStock to implement methods and visualizations specific to your needs
 
@@ -237,6 +239,6 @@ DataStock provides built-in methods like:
 - size is the total size of all data stored in the instance in bytes
 - dsize is a dict with the detail (size for each item in each sub-dict of the instance)
 * `save()`: will save the instance
-* `
+* `coll.load()`: will load a saved instance
 
 

README.md
@@ -41,15 +41,15 @@ Examples:
 Straightforward array visualization:
 ------------------------------------
 
-
+``
 import datastock as ds
 
 # any 1d, 2d or 3d array
-aa = np.
+aa = np.random((100, 100, 100))
 
 # plot interactive figure using shortcut to method
 dax = ds.plot_as_array(aa)
-
+``
 
 Now do **shift + left clic** on any axes, the rest of the interactive commands are automatically printed in your python console
 
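Note that the added snippet calls `np.random((100, 100, 100))`, but `numpy.random` is a module, not a callable, so that line would raise a `TypeError`. A runnable variant of the same example, assuming the `ds.plot_as_array` shortcut shown in the README, would be:

```python
import numpy as np
import datastock as ds

# any 1d, 2d or 3d array (np.random is a module; np.random.random() returns the array)
aa = np.random.random((100, 100, 100))

# plot an interactive figure using the shortcut function
dax = ds.plot_as_array(aa)
```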
@@ -75,7 +75,7 @@ Thanks to dref, the class knows the relationaships between all numpy arrays.
 In particular it knows which arrays share the same references / dimensions
 
 
-```
+```python
 import numpy as np
 import datastock as ds
 
@@ -96,24 +96,24 @@ lprof = [(1 + np.cos(t)[:, None]) * x[None, :] for t in lt]
 # Populate DataStock
 
 # instanciate
-
+coll = ds.DataStock()
 
 # add references (i.e.: store size of each dimension under a unique key)
-
-
+coll.add_ref(key='nc', size=nc)
+coll.add_ref(key='nx', size=nx)
 for ii, nt in enumerate(lnt):
-
+    coll.add_ref(key=f'nt{ii}', size=nt)
 
 # add data dependening on these references
 # you can, optionally, specify units, physical dimensionality (ex: distance, time...), quantity (ex: radius, height, ...) and name (to your liking)
 
-
+coll.add_data(key='x', data=x, dimension='distance', quant='radius', units='m', ref='nx')
 for ii, nt in enumerate(lnt):
-
-
+    coll.add_data(key=f't{ii}', data=lt[ii], dimension='time', units='s', ref=f'nt{ii}')
+    coll.add_data(key=f'prof{ii}', data=lprof[ii], dimension='velocity', units='m/s', ref=(f'nt{ii}', 'x'))
 
 # print in the console the content of st
-
+coll
 ```
 
 <p align="center">
@@ -124,22 +124,22 @@ You can see that DataStock stores the relationships between each array and each
 Specifying explicitly the references is only necessary if there is an ambiguity (i.e.: several references have the same size, like nx and nt2 in our case)
 
 
-
+``
 # plot any array interactively
-dax =
-dax =
-dax =
-dax =
-
+dax = coll.plot_as_array('x')
+dax = coll.plot_as_array('t0')
+dax = coll.plot_as_array('prof0')
+dax = coll.plot_as_array('prof0', keyX='t0', keyY='x', aspect='auto')
+``
 
 You can then decide to store any object category
 Let's create a 'campaign' category to store the characteristics of each measurements campaign
 and let's add a 'campaign' parameter to each profile data
 
-
+``
 # add arbitrary object category as sub-dict of self.dobj
 for ii in range(nc):
-
+    coll.add_obj(
         which='campaign',
         key=f'c{ii}',
         start_date=f'{ii}.04.2022',
@@ -150,16 +150,16 @@ for ii in range(nc):
     )
 
 # create new 'campaign' parameter for data arrays
-
+coll.add_param('campaign', which='data')
 
 # tag each data with its campaign
 for ii in range(nc):
-
-
+    coll.set_param(which='data', key=f't{ii}', param='campaign', value=f'c{ii}')
+    coll.set_param(which='data', key=f'prof{ii}', param='campaign', value=f'c{ii}')
 
 # print in the console the content of st
-
-
+coll
+``
 
 <p align="center">
 <img align="middle" src="https://github.com/ToFuProject/datastock/blob/devel/README_figures/DataStock_Obj.png" width="600" alt="Direct 3d array visualization"/>
@@ -168,31 +168,31 @@ st
 DataStock also provides built-in object selection method to allow return all
 objects matching a criterion, as lits of int indices, bool indices or keys.
 
-
-In [9]:
+``
+In [9]: coll.select(which='campaign', index=2, returnas=int)
 Out[9]: array([2])
 
 # list of 2 => return all matches inside the interval
-In [10]:
+In [10]: coll.select(which='campaign', index=[2, 4], returnas=int)
 Out[10]: array([2, 3, 4])
 
 # tuple of 2 => return all matches outside the interval
-In [11]:
+In [11]: coll.select(which='campaign', index=(2, 4), returnas=int)
 Out[11]: array([0, 1])
 
 # return as keys
-In [12]:
+In [12]: coll.select(which='campaign', index=(2, 4), returnas=str)
 Out[12]: array(['c0', 'c1'], dtype='<U2')
 
 # return as bool indices
-In [13]:
+In [13]: coll.select(which='campaign', index=(2, 4), returnas=bool)
 Out[13]: array([ True, True, False, False, False])
 
 # You can combine as many constraints as needed
-In [17]:
+In [17]: coll.select(which='campaign', index=[2, 4], operator='Barnaby', returnas=str)
 Out[17]: array(['c3', 'c4'], dtype='<U2')
 
-
+``
 
 You can also decide to sub-class DataStock to implement methods and visualizations specific to your needs
 
@@ -205,6 +205,6 @@ DataStock provides built-in methods like:
 - size is the total size of all data stored in the instance in bytes
 - dsize is a dict with the detail (size for each item in each sub-dict of the instance)
 * `save()`: will save the instance
-* `
+* `coll.load()`: will load a saved instance
 
 
Binary file: README_figures/DataStock_Obj.png
Binary file: README_figures/DataStock_refdata.png
Binary file: README_figures/DirectVisualization_3d.png

datastock/__init__.py (new file)
@@ -0,0 +1,31 @@
+# ###############
+# __version__
+# ###############
+
+
+from . import _version
+__version__ = _version.version
+__version_tuple__ = _version.version_tuple
+
+
+# from setuptools_scm import get_version
+# __version__ = get_version(root='..', relative_to=__file__)
+
+
+# from importlib.metadata import version
+# __version__ = version(__package__)
+# cleanup
+# del get_version
+
+
+# ###############
+# sub-packages
+# ###############
+
+
+from . import _generic_check
+from ._generic_utils_plot import *
+from ._class import DataStock
+from ._saveload import load, get_files
+from ._direct_calls import *
+from . import tests
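The new `__init__.py` re-exports `version` and `version_tuple` from `datastock/_version.py`, a 21-line file that the new `.gitignore` excludes from version control because it is generated at build time (the commented-out `setuptools_scm` import suggests that is the generator). Its exact content is not shown in this diff; a minimal sketch of what such a generated module typically provides is:

```python
# Hypothetical sketch of a generated datastock/_version.py (the actual 21-line file is not shown here).
# A single-sourced version file of this kind is typically written automatically at build time.
version = "0.0.50"
version_tuple = (0, 0, 50)

# datastock/__init__.py then re-exports these as:
#   __version__ = _version.version
#   __version_tuple__ = _version.version_tuple
```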