datastock 0.0.49__tar.gz → 0.0.50__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. datastock-0.0.50/.github/workflows/python-publish.yml +35 -0
  2. datastock-0.0.50/.github/workflows/python-testing-matrix.yml +41 -0
  3. datastock-0.0.50/.gitignore +135 -0
  4. datastock-0.0.50/CLASSIFIERS.txt +10 -0
  5. datastock-0.0.49/LICENSE → datastock-0.0.50/LICENSE.txt +1 -1
  6. {datastock-0.0.49 → datastock-0.0.50}/MANIFEST.in +1 -0
  7. {datastock-0.0.49/datastock.egg-info → datastock-0.0.50}/PKG-INFO +50 -63
  8. {datastock-0.0.49 → datastock-0.0.50}/README.md +34 -34
  9. datastock-0.0.50/README_figures/DataStock_Obj.png +0 -0
  10. datastock-0.0.50/README_figures/DataStock_refdata.png +0 -0
  11. datastock-0.0.50/README_figures/DirectVisualization_3d.png +0 -0
  12. datastock-0.0.50/datastock/__init__.py +31 -0
  13. datastock-0.0.50/datastock/_version.py +21 -0
  14. datastock-0.0.50/datastock/tests/prepublish.py +3 -0
  15. {datastock-0.0.49 → datastock-0.0.50}/datastock/version.py +1 -1
  16. {datastock-0.0.49 → datastock-0.0.50/datastock.egg-info}/PKG-INFO +50 -63
  17. {datastock-0.0.49 → datastock-0.0.50}/datastock.egg-info/SOURCES.txt +10 -3
  18. datastock-0.0.50/datastock.egg-info/requires.txt +13 -0
  19. datastock-0.0.50/datastock.egg-info/top_level.txt +1 -0
  20. datastock-0.0.50/pyproject.toml +69 -0
  21. datastock-0.0.50/setup.cfg +4 -0
  22. datastock-0.0.49/datastock/__init__.py +0 -10
  23. datastock-0.0.49/datastock.egg-info/requires.txt +0 -18
  24. datastock-0.0.49/datastock.egg-info/top_level.txt +0 -3
  25. datastock-0.0.49/pyproject.toml +0 -64
  26. datastock-0.0.49/setup.cfg +0 -7
  27. datastock-0.0.49/setup.py +0 -178
  28. {datastock-0.0.49 → datastock-0.0.50}/_updateversion.py +0 -0
  29. {datastock-0.0.49 → datastock-0.0.50}/datastock/_DataCollection_utils.py +0 -0
  30. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class.py +0 -0
  31. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class0.py +0 -0
  32. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1.py +0 -0
  33. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_binning.py +0 -0
  34. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_check.py +0 -0
  35. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_color_touch.py +0 -0
  36. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_compute.py +0 -0
  37. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_domain.py +0 -0
  38. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_interpolate.py +0 -0
  39. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_show.py +0 -0
  40. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class1_uniformize.py +0 -0
  41. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class2.py +0 -0
  42. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class2_interactivity.py +0 -0
  43. {datastock-0.0.49 → datastock-0.0.50}/datastock/_class3.py +0 -0
  44. {datastock-0.0.49 → datastock-0.0.50}/datastock/_direct_calls.py +0 -0
  45. {datastock-0.0.49 → datastock-0.0.50}/datastock/_export_dataframe.py +0 -0
  46. {datastock-0.0.49 → datastock-0.0.50}/datastock/_find_plateau.py +0 -0
  47. {datastock-0.0.49 → datastock-0.0.50}/datastock/_generic_check.py +0 -0
  48. {datastock-0.0.49 → datastock-0.0.50}/datastock/_generic_utils.py +0 -0
  49. {datastock-0.0.49 → datastock-0.0.50}/datastock/_generic_utils_plot.py +0 -0
  50. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_BvsA_as_distribution.py +0 -0
  51. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_BvsA_as_distribution_check.py +0 -0
  52. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_as_array.py +0 -0
  53. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_as_array_1d.py +0 -0
  54. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_as_array_234d.py +0 -0
  55. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_as_mobile_lines.py +0 -0
  56. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_as_profile1d.py +0 -0
  57. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_correlations.py +0 -0
  58. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_old_backup.py +0 -0
  59. {datastock-0.0.49 → datastock-0.0.50}/datastock/_plot_text.py +0 -0
  60. {datastock-0.0.49 → datastock-0.0.50}/datastock/_saveload.py +0 -0
  61. {datastock-0.0.49 → datastock-0.0.50}/datastock/tests/__init__.py +0 -0
  62. {datastock-0.0.49 → datastock-0.0.50}/datastock/tests/output/__init__.py +0 -0
  63. {datastock-0.0.49 → datastock-0.0.50}/datastock/tests/test_01_DataStock.py +0 -0
  64. {datastock-0.0.49 → datastock-0.0.50}/datastock.egg-info/dependency_links.txt +0 -0
  65. {datastock-0.0.49 → datastock-0.0.50}/datastock.egg-info/entry_points.txt +0 -0
  66. {datastock-0.0.49 → datastock-0.0.50}/scripts/__init__.py +0 -0
  67. {datastock-0.0.49 → datastock-0.0.50}/scripts/_bash_version.py +0 -0
  68. {datastock-0.0.49 → datastock-0.0.50}/scripts/_dparser.py +0 -0
  69. {datastock-0.0.49 → datastock-0.0.50}/scripts/main.py +0 -0
@@ -0,0 +1,35 @@
1
+ # This workflow will upload a Python Package using Twine when a release is created
2
+ # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3
+
4
+ # This workflow uses actions that are not certified by GitHub.
5
+ # They are provided by a third-party and are governed by
6
+ # separate terms of service, privacy policy, and support
7
+ # documentation.
8
+
9
+ name: Upload Python Package
10
+
11
+ on:
12
+ push:
13
+ tags:
14
+ - '*'
15
+ branches:
16
+ - main
17
+ release:
18
+ types: [created]
19
+
20
+ jobs:
21
+ pypi:
22
+ name: Publish sdist to Pypi
23
+ runs-on: ubuntu-latest
24
+ steps:
25
+ - uses: actions/checkout@v4
26
+ - uses: astral-sh/setup-uv@v5
27
+ with:
28
+ python-version: '3.11'
29
+ - run: uv build
30
+ # Check that basic features work and we didn't miss to include crucial files
31
+ - name: import test (wheel)
32
+ run: uv run --isolated --no-project -p 3.11 --with dist/*.whl datastock/tests/prepublish.py
33
+ - name: import test (source distribution)
34
+ run: uv run --isolated --no-project -p 3.11 --with dist/*.tar.gz datastock/tests/prepublish.py
35
+ - run: uv publish -t ${{ secrets.PYPI_API_TOKEN }}
@@ -0,0 +1,41 @@
1
+ # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2
+ # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3
+
4
+ name: Testing matrix
5
+
6
+ on:
7
+ push:
8
+ branches: [ devel ]
9
+ pull_request:
10
+ branches: [ devel ]
11
+
12
+ jobs:
13
+ build:
14
+
15
+ runs-on: ${{ matrix.os }}
16
+
17
+ strategy:
18
+ fail-fast: true
19
+ matrix:
20
+ os: [ubuntu-latest, windows-latest, macos-latest]
21
+ python-version: ["3.8", "3.9", "3.10", "3.11"]
22
+
23
+ steps:
24
+
25
+ # git checkout
26
+ - uses: actions/checkout@v4
27
+
28
+ # Install uv
29
+ - name: Install uv
30
+ uses: astral-sh/setup-uv@v5
31
+ with:
32
+ python-version: ${{ matrix.python-version }}
33
+
34
+ # Install library
35
+ - name: Install the project
36
+ run: uv sync --all-extras --dev
37
+
38
+ # Run tests
39
+ - name: Run tests
40
+ # For example, using `pytest`
41
+ run: uv run pytest datastock/tests
@@ -0,0 +1,135 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # single sourcing verion file
10
+ datastock/_version.py
11
+
12
+ # Distribution / packaging
13
+ .Python
14
+ build/
15
+ develop-eggs/
16
+ dist/
17
+ downloads/
18
+ eggs/
19
+ .eggs/
20
+ lib/
21
+ lib64/
22
+ parts/
23
+ sdist/
24
+ var/
25
+ wheels/
26
+ pip-wheel-metadata/
27
+ share/python-wheels/
28
+ *.egg-info/
29
+ .installed.cfg
30
+ *.egg
31
+ MANIFEST
32
+
33
+ # PyInstaller
34
+ # Usually these files are written by a python script from a template
35
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
36
+ *.manifest
37
+ *.spec
38
+
39
+ # Installer logs
40
+ pip-log.txt
41
+ pip-delete-this-directory.txt
42
+
43
+ # Unit test / coverage reports
44
+ htmlcov/
45
+ .tox/
46
+ .nox/
47
+ .coverage
48
+ .coverage.*
49
+ .cache
50
+ nosetests.xml
51
+ coverage.xml
52
+ *.cover
53
+ *.py,cover
54
+ .hypothesis/
55
+ .pytest_cache/
56
+
57
+ # Translations
58
+ *.mo
59
+ *.pot
60
+
61
+ # Django stuff:
62
+ *.log
63
+ local_settings.py
64
+ db.sqlite3
65
+ db.sqlite3-journal
66
+
67
+ # Flask stuff:
68
+ instance/
69
+ .webassets-cache
70
+
71
+ # Scrapy stuff:
72
+ .scrapy
73
+
74
+ # Sphinx documentation
75
+ docs/_build/
76
+
77
+ # PyBuilder
78
+ target/
79
+
80
+ # Jupyter Notebook
81
+ .ipynb_checkpoints
82
+
83
+ # IPython
84
+ profile_default/
85
+ ipython_config.py
86
+
87
+ # pyenv
88
+ .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98
+ __pypackages__/
99
+
100
+ # Celery stuff
101
+ celerybeat-schedule
102
+ celerybeat.pid
103
+
104
+ # SageMath parsed files
105
+ *.sage.py
106
+
107
+ # Environments
108
+ .env
109
+ .venv
110
+ env/
111
+ venv/
112
+ ENV/
113
+ env.bak/
114
+ venv.bak/
115
+
116
+ # Spyder project settings
117
+ .spyderproject
118
+ .spyproject
119
+
120
+ # Rope project settings
121
+ .ropeproject
122
+
123
+ # mkdocs documentation
124
+ /site
125
+
126
+ # mypy
127
+ .mypy_cache/
128
+ .dmypy.json
129
+ dmypy.json
130
+
131
+ # Pyre type checker
132
+ .pyre/
133
+
134
+ # vim swap file
135
+ *.swp
@@ -0,0 +1,10 @@
1
+ Development Status :: 5 - Production/Stable
2
+ Intended Audience :: Science/Research
3
+ Programming Language :: Python :: 3
4
+ Programming Language :: Python :: 3.6
5
+ Programming Language :: Python :: 3.7
6
+ Programming Language :: Python :: 3.8
7
+ Programming Language :: Python :: 3.9
8
+ Programming Language :: Python :: 3.10
9
+ Programming Language :: Python :: 3.11
10
+ Natural Language :: English
@@ -1,6 +1,6 @@
1
1
  MIT License
2
2
 
3
- Copyright (c) 2022 ToFuProject
3
+ Copyright (c) 2023 ToFuProject
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
@@ -3,3 +3,4 @@
3
3
  include MANIFEST.in
4
4
  include LICENSE.txt
5
5
  include pyproject.toml
6
+ include CLASSIFIERS.txt
@@ -1,33 +1,12 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: datastock
3
- Version: 0.0.49
3
+ Version: 0.0.50
4
4
  Summary: Generic handler for multiple heterogenous numpy arrays and subclasses
5
- Home-page: https://github.com/ToFuProject/datastock
6
- Author: Didier VEZINET
7
5
  Author-email: Didier VEZINET <didier.vezinet@gmail.com>
8
6
  Maintainer-email: Didier VEZINET <didier.vezinet@gmail.com>
9
- License: MIT License
10
-
11
- Copyright (c) 2022 ToFuProject
12
-
13
- Permission is hereby granted, free of charge, to any person obtaining a copy
14
- of this software and associated documentation files (the "Software"), to deal
15
- in the Software without restriction, including without limitation the rights
16
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
17
- copies of the Software, and to permit persons to whom the Software is
18
- furnished to do so, subject to the following conditions:
19
-
20
- The above copyright notice and this permission notice shall be included in all
21
- copies or substantial portions of the Software.
22
-
23
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
28
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
29
- SOFTWARE.
30
-
7
+ License: MIT
8
+ Project-URL: Homepage, https://github.com/ToFuProject/datastock
9
+ Project-URL: Issues, https://github.com/ToFuProject/datastock/issues
31
10
  Keywords: data,analysis,interactive,heterogeneous arrays,numpy,Collection
32
11
  Classifier: Development Status :: 5 - Production/Stable
33
12
  Classifier: Intended Audience :: Science/Research
@@ -38,12 +17,20 @@ Classifier: Programming Language :: Python :: 3.8
38
17
  Classifier: Programming Language :: Python :: 3.9
39
18
  Classifier: Programming Language :: Python :: 3.10
40
19
  Classifier: Programming Language :: Python :: 3.11
41
- Requires-Python: >=3.6
20
+ Classifier: Natural Language :: English
21
+ Requires-Python: >=3.8
42
22
  Description-Content-Type: text/markdown
43
- Provides-Extra: dev
23
+ License-File: LICENSE.txt
24
+ Requires-Dist: numpy
25
+ Requires-Dist: scipy
26
+ Requires-Dist: matplotlib
27
+ Requires-Dist: PyQt5; platform_system != "Windows"
28
+ Requires-Dist: astropy
44
29
  Provides-Extra: linting
30
+ Requires-Dist: ruff; extra == "linting"
45
31
  Provides-Extra: formatting
46
- License-File: LICENSE
32
+ Requires-Dist: ruff; extra == "formatting"
33
+ Dynamic: license-file
47
34
 
48
35
  [![Conda]( https://anaconda.org/conda-forge/datastock/badges/version.svg)](https://anaconda.org/conda-forge/datastock)
49
36
  [![](https://anaconda.org/conda-forge/datastock/badges/downloads.svg)](https://anaconda.org/conda-forge/datastock)
@@ -88,15 +75,15 @@ Examples:
88
75
  Straightforward array visualization:
89
76
  ------------------------------------
90
77
 
91
- ```
78
+ ``
92
79
  import datastock as ds
93
80
 
94
81
  # any 1d, 2d or 3d array
95
- aa = np.np.random.random((100, 100, 100))
82
+ aa = np.random((100, 100, 100))
96
83
 
97
84
  # plot interactive figure using shortcut to method
98
85
  dax = ds.plot_as_array(aa)
99
- ```
86
+ ``
100
87
 
101
88
  Now do **shift + left clic** on any axes, the rest of the interactive commands are automatically printed in your python console
102
89
 
@@ -122,7 +109,7 @@ Thanks to dref, the class knows the relationaships between all numpy arrays.
122
109
  In particular it knows which arrays share the same references / dimensions
123
110
 
124
111
 
125
- ```
112
+ ```python
126
113
  import numpy as np
127
114
  import datastock as ds
128
115
 
@@ -143,24 +130,24 @@ lprof = [(1 + np.cos(t)[:, None]) * x[None, :] for t in lt]
143
130
  # Populate DataStock
144
131
 
145
132
  # instanciate
146
- st = ds.DataStock()
133
+ coll = ds.DataStock()
147
134
 
148
135
  # add references (i.e.: store size of each dimension under a unique key)
149
- st.add_ref(key='nc', size=nc)
150
- st.add_ref(key='nx', size=nx)
136
+ coll.add_ref(key='nc', size=nc)
137
+ coll.add_ref(key='nx', size=nx)
151
138
  for ii, nt in enumerate(lnt):
152
- st.add_ref(key=f'nt{ii}', size=nt)
139
+ coll.add_ref(key=f'nt{ii}', size=nt)
153
140
 
154
141
  # add data dependening on these references
155
142
  # you can, optionally, specify units, physical dimensionality (ex: distance, time...), quantity (ex: radius, height, ...) and name (to your liking)
156
143
 
157
- st.add_data(key='x', data=x, dimension='distance', quant='radius', units='m', ref='nx')
144
+ coll.add_data(key='x', data=x, dimension='distance', quant='radius', units='m', ref='nx')
158
145
  for ii, nt in enumerate(lnt):
159
- st.add_data(key=f't{ii}', data=lt[ii], dimension='time', units='s', ref=f'nt{ii}')
160
- st.add_data(key=f'prof{ii}', data=lprof[ii], dimension='velocity', units='m/s', ref=(f'nt{ii}', 'x'))
146
+ coll.add_data(key=f't{ii}', data=lt[ii], dimension='time', units='s', ref=f'nt{ii}')
147
+ coll.add_data(key=f'prof{ii}', data=lprof[ii], dimension='velocity', units='m/s', ref=(f'nt{ii}', 'x'))
161
148
 
162
149
  # print in the console the content of st
163
- st
150
+ coll
164
151
  ```
165
152
 
166
153
  <p align="center">
@@ -171,22 +158,22 @@ You can see that DataStock stores the relationships between each array and each
171
158
  Specifying explicitly the references is only necessary if there is an ambiguity (i.e.: several references have the same size, like nx and nt2 in our case)
172
159
 
173
160
 
174
- ```
161
+ ``
175
162
  # plot any array interactively
176
- dax = st.plot_as_array('x')
177
- dax = st.plot_as_array('t0')
178
- dax = st.plot_as_array('prof0')
179
- dax = st.plot_as_array('prof0', keyX='t0', keyY='x', aspect='auto')
180
- ```
163
+ dax = coll.plot_as_array('x')
164
+ dax = coll.plot_as_array('t0')
165
+ dax = coll.plot_as_array('prof0')
166
+ dax = coll.plot_as_array('prof0', keyX='t0', keyY='x', aspect='auto')
167
+ ``
181
168
 
182
169
  You can then decide to store any object category
183
170
  Let's create a 'campaign' category to store the characteristics of each measurements campaign
184
171
  and let's add a 'campaign' parameter to each profile data
185
172
 
186
- ```
173
+ ``
187
174
  # add arbitrary object category as sub-dict of self.dobj
188
175
  for ii in range(nc):
189
- st.add_obj(
176
+ coll.add_obj(
190
177
  which='campaign',
191
178
  key=f'c{ii}',
192
179
  start_date=f'{ii}.04.2022',
@@ -197,16 +184,16 @@ for ii in range(nc):
197
184
  )
198
185
 
199
186
  # create new 'campaign' parameter for data arrays
200
- st.add_param('campaign', which='data')
187
+ coll.add_param('campaign', which='data')
201
188
 
202
189
  # tag each data with its campaign
203
190
  for ii in range(nc):
204
- st.set_param(which='data', key=f't{ii}', param='campaign', value=f'c{ii}')
205
- st.set_param(which='data', key=f'prof{ii}', param='campaign', value=f'c{ii}')
191
+ coll.set_param(which='data', key=f't{ii}', param='campaign', value=f'c{ii}')
192
+ coll.set_param(which='data', key=f'prof{ii}', param='campaign', value=f'c{ii}')
206
193
 
207
194
  # print in the console the content of st
208
- st
209
- ```
195
+ coll
196
+ ``
210
197
 
211
198
  <p align="center">
212
199
  <img align="middle" src="https://github.com/ToFuProject/datastock/blob/devel/README_figures/DataStock_Obj.png" width="600" alt="Direct 3d array visualization"/>
@@ -215,31 +202,31 @@ st
215
202
  DataStock also provides built-in object selection method to allow return all
216
203
  objects matching a criterion, as lits of int indices, bool indices or keys.
217
204
 
218
- ```
219
- In [9]: st.select(which='campaign', index=2, returnas=int)
205
+ ``
206
+ In [9]: coll.select(which='campaign', index=2, returnas=int)
220
207
  Out[9]: array([2])
221
208
 
222
209
  # list of 2 => return all matches inside the interval
223
- In [10]: st.select(which='campaign', index=[2, 4], returnas=int)
210
+ In [10]: coll.select(which='campaign', index=[2, 4], returnas=int)
224
211
  Out[10]: array([2, 3, 4])
225
212
 
226
213
  # tuple of 2 => return all matches outside the interval
227
- In [11]: st.select(which='campaign', index=(2, 4), returnas=int)
214
+ In [11]: coll.select(which='campaign', index=(2, 4), returnas=int)
228
215
  Out[11]: array([0, 1])
229
216
 
230
217
  # return as keys
231
- In [12]: st.select(which='campaign', index=(2, 4), returnas=str)
218
+ In [12]: coll.select(which='campaign', index=(2, 4), returnas=str)
232
219
  Out[12]: array(['c0', 'c1'], dtype='<U2')
233
220
 
234
221
  # return as bool indices
235
- In [13]: st.select(which='campaign', index=(2, 4), returnas=bool)
222
+ In [13]: coll.select(which='campaign', index=(2, 4), returnas=bool)
236
223
  Out[13]: array([ True, True, False, False, False])
237
224
 
238
225
  # You can combine as many constraints as needed
239
- In [17]: st.select(which='campaign', index=[2, 4], operator='Barnaby', returnas=str)
226
+ In [17]: coll.select(which='campaign', index=[2, 4], operator='Barnaby', returnas=str)
240
227
  Out[17]: array(['c3', 'c4'], dtype='<U2')
241
228
 
242
- ```
229
+ ``
243
230
 
244
231
  You can also decide to sub-class DataStock to implement methods and visualizations specific to your needs
245
232
 
@@ -252,6 +239,6 @@ DataStock provides built-in methods like:
252
239
  - size is the total size of all data stored in the instance in bytes
253
240
  - dsize is a dict with the detail (size for each item in each sub-dict of the instance)
254
241
  * `save()`: will save the instance
255
- * `ds.load()`: will load a saved instance
242
+ * `coll.load()`: will load a saved instance
256
243
 
257
244
 
@@ -41,15 +41,15 @@ Examples:
41
41
  Straightforward array visualization:
42
42
  ------------------------------------
43
43
 
44
- ```
44
+ ``
45
45
  import datastock as ds
46
46
 
47
47
  # any 1d, 2d or 3d array
48
- aa = np.np.random.random((100, 100, 100))
48
+ aa = np.random((100, 100, 100))
49
49
 
50
50
  # plot interactive figure using shortcut to method
51
51
  dax = ds.plot_as_array(aa)
52
- ```
52
+ ``
53
53
 
54
54
  Now do **shift + left clic** on any axes, the rest of the interactive commands are automatically printed in your python console
55
55
 
@@ -75,7 +75,7 @@ Thanks to dref, the class knows the relationaships between all numpy arrays.
75
75
  In particular it knows which arrays share the same references / dimensions
76
76
 
77
77
 
78
- ```
78
+ ```python
79
79
  import numpy as np
80
80
  import datastock as ds
81
81
 
@@ -96,24 +96,24 @@ lprof = [(1 + np.cos(t)[:, None]) * x[None, :] for t in lt]
96
96
  # Populate DataStock
97
97
 
98
98
  # instanciate
99
- st = ds.DataStock()
99
+ coll = ds.DataStock()
100
100
 
101
101
  # add references (i.e.: store size of each dimension under a unique key)
102
- st.add_ref(key='nc', size=nc)
103
- st.add_ref(key='nx', size=nx)
102
+ coll.add_ref(key='nc', size=nc)
103
+ coll.add_ref(key='nx', size=nx)
104
104
  for ii, nt in enumerate(lnt):
105
- st.add_ref(key=f'nt{ii}', size=nt)
105
+ coll.add_ref(key=f'nt{ii}', size=nt)
106
106
 
107
107
  # add data dependening on these references
108
108
  # you can, optionally, specify units, physical dimensionality (ex: distance, time...), quantity (ex: radius, height, ...) and name (to your liking)
109
109
 
110
- st.add_data(key='x', data=x, dimension='distance', quant='radius', units='m', ref='nx')
110
+ coll.add_data(key='x', data=x, dimension='distance', quant='radius', units='m', ref='nx')
111
111
  for ii, nt in enumerate(lnt):
112
- st.add_data(key=f't{ii}', data=lt[ii], dimension='time', units='s', ref=f'nt{ii}')
113
- st.add_data(key=f'prof{ii}', data=lprof[ii], dimension='velocity', units='m/s', ref=(f'nt{ii}', 'x'))
112
+ coll.add_data(key=f't{ii}', data=lt[ii], dimension='time', units='s', ref=f'nt{ii}')
113
+ coll.add_data(key=f'prof{ii}', data=lprof[ii], dimension='velocity', units='m/s', ref=(f'nt{ii}', 'x'))
114
114
 
115
115
  # print in the console the content of st
116
- st
116
+ coll
117
117
  ```
118
118
 
119
119
  <p align="center">
@@ -124,22 +124,22 @@ You can see that DataStock stores the relationships between each array and each
124
124
  Specifying explicitly the references is only necessary if there is an ambiguity (i.e.: several references have the same size, like nx and nt2 in our case)
125
125
 
126
126
 
127
- ```
127
+ ``
128
128
  # plot any array interactively
129
- dax = st.plot_as_array('x')
130
- dax = st.plot_as_array('t0')
131
- dax = st.plot_as_array('prof0')
132
- dax = st.plot_as_array('prof0', keyX='t0', keyY='x', aspect='auto')
133
- ```
129
+ dax = coll.plot_as_array('x')
130
+ dax = coll.plot_as_array('t0')
131
+ dax = coll.plot_as_array('prof0')
132
+ dax = coll.plot_as_array('prof0', keyX='t0', keyY='x', aspect='auto')
133
+ ``
134
134
 
135
135
  You can then decide to store any object category
136
136
  Let's create a 'campaign' category to store the characteristics of each measurements campaign
137
137
  and let's add a 'campaign' parameter to each profile data
138
138
 
139
- ```
139
+ ``
140
140
  # add arbitrary object category as sub-dict of self.dobj
141
141
  for ii in range(nc):
142
- st.add_obj(
142
+ coll.add_obj(
143
143
  which='campaign',
144
144
  key=f'c{ii}',
145
145
  start_date=f'{ii}.04.2022',
@@ -150,16 +150,16 @@ for ii in range(nc):
150
150
  )
151
151
 
152
152
  # create new 'campaign' parameter for data arrays
153
- st.add_param('campaign', which='data')
153
+ coll.add_param('campaign', which='data')
154
154
 
155
155
  # tag each data with its campaign
156
156
  for ii in range(nc):
157
- st.set_param(which='data', key=f't{ii}', param='campaign', value=f'c{ii}')
158
- st.set_param(which='data', key=f'prof{ii}', param='campaign', value=f'c{ii}')
157
+ coll.set_param(which='data', key=f't{ii}', param='campaign', value=f'c{ii}')
158
+ coll.set_param(which='data', key=f'prof{ii}', param='campaign', value=f'c{ii}')
159
159
 
160
160
  # print in the console the content of st
161
- st
162
- ```
161
+ coll
162
+ ``
163
163
 
164
164
  <p align="center">
165
165
  <img align="middle" src="https://github.com/ToFuProject/datastock/blob/devel/README_figures/DataStock_Obj.png" width="600" alt="Direct 3d array visualization"/>
@@ -168,31 +168,31 @@ st
168
168
  DataStock also provides built-in object selection method to allow return all
169
169
  objects matching a criterion, as lits of int indices, bool indices or keys.
170
170
 
171
- ```
172
- In [9]: st.select(which='campaign', index=2, returnas=int)
171
+ ``
172
+ In [9]: coll.select(which='campaign', index=2, returnas=int)
173
173
  Out[9]: array([2])
174
174
 
175
175
  # list of 2 => return all matches inside the interval
176
- In [10]: st.select(which='campaign', index=[2, 4], returnas=int)
176
+ In [10]: coll.select(which='campaign', index=[2, 4], returnas=int)
177
177
  Out[10]: array([2, 3, 4])
178
178
 
179
179
  # tuple of 2 => return all matches outside the interval
180
- In [11]: st.select(which='campaign', index=(2, 4), returnas=int)
180
+ In [11]: coll.select(which='campaign', index=(2, 4), returnas=int)
181
181
  Out[11]: array([0, 1])
182
182
 
183
183
  # return as keys
184
- In [12]: st.select(which='campaign', index=(2, 4), returnas=str)
184
+ In [12]: coll.select(which='campaign', index=(2, 4), returnas=str)
185
185
  Out[12]: array(['c0', 'c1'], dtype='<U2')
186
186
 
187
187
  # return as bool indices
188
- In [13]: st.select(which='campaign', index=(2, 4), returnas=bool)
188
+ In [13]: coll.select(which='campaign', index=(2, 4), returnas=bool)
189
189
  Out[13]: array([ True, True, False, False, False])
190
190
 
191
191
  # You can combine as many constraints as needed
192
- In [17]: st.select(which='campaign', index=[2, 4], operator='Barnaby', returnas=str)
192
+ In [17]: coll.select(which='campaign', index=[2, 4], operator='Barnaby', returnas=str)
193
193
  Out[17]: array(['c3', 'c4'], dtype='<U2')
194
194
 
195
- ```
195
+ ``
196
196
 
197
197
  You can also decide to sub-class DataStock to implement methods and visualizations specific to your needs
198
198
 
@@ -205,6 +205,6 @@ DataStock provides built-in methods like:
205
205
  - size is the total size of all data stored in the instance in bytes
206
206
  - dsize is a dict with the detail (size for each item in each sub-dict of the instance)
207
207
  * `save()`: will save the instance
208
- * `ds.load()`: will load a saved instance
208
+ * `coll.load()`: will load a saved instance
209
209
 
210
210
 
@@ -0,0 +1,31 @@
1
+ # ###############
2
+ # __version__
3
+ # ###############
4
+
5
+
6
+ from . import _version
7
+ __version__ = _version.version
8
+ __version_tuple__ = _version.version_tuple
9
+
10
+
11
+ # from setuptools_scm import get_version
12
+ # __version__ = get_version(root='..', relative_to=__file__)
13
+
14
+
15
+ # from importlib.metadata import version
16
+ # __version__ = version(__package__)
17
+ # cleanup
18
+ # del get_version
19
+
20
+
21
+ # ###############
22
+ # sub-packages
23
+ # ###############
24
+
25
+
26
+ from . import _generic_check
27
+ from ._generic_utils_plot import *
28
+ from ._class import DataStock
29
+ from ._saveload import load, get_files
30
+ from ._direct_calls import *
31
+ from . import tests
@@ -0,0 +1,21 @@
1
+ # file generated by setuptools-scm
2
+ # don't change, don't track in version control
3
+
4
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
5
+
6
+ TYPE_CHECKING = False
7
+ if TYPE_CHECKING:
8
+ from typing import Tuple
9
+ from typing import Union
10
+
11
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
12
+ else:
13
+ VERSION_TUPLE = object
14
+
15
+ version: str
16
+ __version__: str
17
+ __version_tuple__: VERSION_TUPLE
18
+ version_tuple: VERSION_TUPLE
19
+
20
+ __version__ = version = '0.0.50'
21
+ __version_tuple__ = version_tuple = (0, 0, 50)