datastock 0.0.47__py3-none-any.whl → 0.0.50__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datastock/__init__.py +23 -2
- datastock/_class2.py +9 -5
- datastock/_generic_check.py +3 -52
- datastock/_version.py +21 -0
- datastock/tests/prepublish.py +3 -0
- datastock/version.py +1 -1
- {datastock-0.0.47.dist-info → datastock-0.0.50.dist-info}/METADATA +55 -53
- {datastock-0.0.47.dist-info → datastock-0.0.50.dist-info}/RECORD +12 -10
- {datastock-0.0.47.dist-info → datastock-0.0.50.dist-info}/WHEEL +1 -1
- datastock-0.0.50.dist-info/entry_points.txt +2 -0
- datastock-0.0.47.dist-info/LICENSE → datastock-0.0.50.dist-info/licenses/LICENSE.txt +1 -1
- datastock-0.0.50.dist-info/top_level.txt +1 -0
- _updateversion.py +0 -32
- datastock-0.0.47.dist-info/top_level.txt +0 -2
datastock/__init__.py
CHANGED
@@ -1,10 +1,31 @@
+# ###############
+# __version__
+# ###############
 
 
-from .
+from . import _version
+__version__ = _version.version
+__version_tuple__ = _version.version_tuple
+
+
+# from setuptools_scm import get_version
+# __version__ = get_version(root='..', relative_to=__file__)
+
+
+# from importlib.metadata import version
+# __version__ = version(__package__)
+# cleanup
+# del get_version
+
+
+# ###############
+# sub-packages
+# ###############
+
 
 from . import _generic_check
 from ._generic_utils_plot import *
 from ._class import DataStock
 from ._saveload import load, get_files
 from ._direct_calls import *
-from . import tests
+from . import tests
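The net effect of this change is that the package version is no longer hard-coded in `__init__.py`: it is re-exported from the setuptools-scm-generated `datastock/_version.py` shown further below. A minimal sketch of what the installed wheel then exposes (values taken from the `_version.py` added in this diff):

```python
# Minimal sketch: reading the version attributes re-exported by the new __init__.py.
# The printed values correspond to the 0.0.50 wheel in this diff.
import datastock

print(datastock.__version__)        # '0.0.50'
print(datastock.__version_tuple__)  # (0, 0, 50)
```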
datastock/_class2.py
CHANGED
@@ -629,7 +629,7 @@ class DataStock2(DataStock1):
 v0['handle'].parent(),
 )
 except Exception as err:
-error = err
+error = "1\n" + str(err)
 elif hasattr(v0['handle'], 'parent'):
 try:
 v0['handle'].manager.toolbar.__init__(
@@ -637,9 +637,12 @@ class DataStock2(DataStock1):
 v0['handle'].parent(),
 )
 except Exception as err:
-
+if "can't initialize an object twice" in str(err):
+pass
+else:
+error = "2\n" + str(err)
 else:
-error =
+error = "3"
 
 if error is not False:
 import platform
@@ -648,6 +651,7 @@ class DataStock2(DataStock1):
 lstr0 = [f"\t- {k1}" for k1 in dir(v0['handle'])]
 lstr1 = [f"\t- {k1}" for k1 in dir(v0['handle'].manager.toolbar)]
 msg = (
+"Problem with connect()\n"
 f"platform: {platform.platform()}\n"
 f"python: {sys.version}\n"
 f"backend: {plt.get_backend()}\n"
@@ -657,7 +661,7 @@ class DataStock2(DataStock1):
 + "\n".join(lstr1)
 )
 if error is not True:
-msg += '\n' + str(
+msg += '\n' + str(error)
 warnings.warn(msg)
 
 self._dobj['canvas'][k0]['cid'] = {
@@ -1393,4 +1397,4 @@ class DataStock2(DataStock1):
 
 __all__ = [
 sorted([k0 for k0 in locals() if k0.startswith('DataStock')])[-1]
-]
+]
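The `connect()`-related change above tags each failure path with a short marker ("1", "2" or "3"), silently ignores the known Qt "can't initialize an object twice" error, and prefixes the warning with "Problem with connect()" so the report identifies which branch failed. A standalone sketch of that pattern, using a hypothetical `reinit` callable in place of the toolbar re-initialization (not datastock API):

```python
import warnings

def _reinit_with_diagnostics(handle, reinit):
    """Illustrative sketch only: `handle` and `reinit` are hypothetical stand-ins."""
    error = False
    try:
        reinit(handle)
    except Exception as err:
        # Qt raises this when the toolbar was already initialized; it is harmless.
        if "can't initialize an object twice" in str(err):
            pass
        else:
            error = "2\n" + str(err)   # tag the failing branch, keep the message

    if error is not False:
        warnings.warn("Problem with connect()\n" + str(error))
```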
datastock/_generic_check.py
CHANGED
@@ -362,8 +362,6 @@ def _check_dict_valid_keys(
 var[k0] = None
 continue
 
-vv = var.get(k0)
-
 # routine to call
 if any([ss in v0.keys() for ss in lkarray]):
 var[k0] = _check_flat1darray(
@@ -610,7 +608,7 @@ def _check_all_broadcastable(
 ndim = lndim[0]
 
 else:
-lstr = [f"-
+lstr = [f"\t- {k0}: {v0}" for k0, v0 in dndim.items()]
 msg = (
 "Some keyword args have non-compatible dimensions:\n"
 + "\n".join(lstr)
@@ -675,48 +673,6 @@ def _check_all_broadcastable(
 # Utilities for plotting
 # #############################################################################
 
-# DEPRECATED
-# def _check_inplace(coll=None, keys=None):
-# """ Check key to data and inplace """
-
-# # -----------------------------
-# # keys of data to be extracted
-# # ----------------------------
-
-# if isinstance(keys, str):
-# keys = [keys]
-# keys = _check_var_iter(
-# keys, 'keys',
-# default=None,
-# types=list,
-# types_iter=str,
-# allowed=list(coll.ddata.keys()),
-# )
-
-# # ----------------------
-# # extract sub-collection
-# # ----------------------
-
-# lk0 = list(keys)
-# for key in keys:
-
-# # Include all data matching any single ref
-# for rr in coll._ddata[key]['ref']:
-# for k0, v0 in coll._ddata.items():
-# if v0['ref'] == (rr,):
-# if k0 not in lk0:
-# lk0.append(k0)
-
-# # include all data matching all refs
-# for k0, v0 in coll._ddata.items():
-# if v0['ref'] == coll._ddata[key]['ref']:
-# if k0 not in lk0:
-# lk0.append(k0)
-
-# coll2 = coll.extract(lk0)
-
-# return keys, coll2
-
 
 def _check_dax(dax=None, main=None):
 
@@ -844,7 +800,7 @@ def _check_lim(lim):
 if len(dfail) > 0:
 lstr = [f"\t- lim[{ii}]: {vv}" for ii, vv in dfail.items()]
 msg = (
-"The following non-conformities in lim have been identified:\n"
+"The following non-conformities in lim have been identified:\n"
 + "\n".join(lstr)
 )
 raise Exception(msg)
@@ -900,7 +856,6 @@ def _apply_lim(lim=None, data=None, logic=None):
 return ind
 
 
-
 def _apply_dlim(dlim=None, logic_intervals=None, logic=None, ddata=None):
 
 # ------------
@@ -960,6 +915,7 @@ def _apply_dlim(dlim=None, logic_intervals=None, logic=None, ddata=None):
 lstr = [f"\t- {k0}: {v0}" for k0, v0 in dfail.items()]
 msg = (
 "The following keys have non-compatible shapes:\n"
++ "\n".join(lstr)
 )
 raise Exception(msg)
 
@@ -1021,11 +977,6 @@ def _apply_dlim(dlim=None, logic_intervals=None, logic=None, ddata=None):
 
 def _check_cmap_vminvmax(data=None, cmap=None, vmin=None, vmax=None):
 # cmap
-c0 = (
-cmap is None
-or vmin is None
-or vmax is None
-)
 if cmap is None or vmin is None or vmax is None:
 nanmax = np.nanmax(data)
 nanmin = np.nanmin(data)
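The `_generic_check.py` fixes all complete the same reporting pattern: collect per-key failures in a dict, format one `\t- key: value` line per failure, and raise with the joined list (the previously truncated `lstr` line and the missing `"\n".join(lstr)` are restored). A standalone sketch of that pattern, with invented dict contents for illustration only:

```python
# Sketch of the dfail -> exception pattern completed by this diff;
# the dfail contents below are invented for illustration.
dfail = {'a': '3 dims', 'b': '2 dims'}

if len(dfail) > 0:
    lstr = [f"\t- {k0}: {v0}" for k0, v0 in dfail.items()]
    msg = (
        "Some keyword args have non-compatible dimensions:\n"
        + "\n".join(lstr)
    )
    raise Exception(msg)
```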
datastock/_version.py
ADDED
@@ -0,0 +1,21 @@
+# file generated by setuptools-scm
+# don't change, don't track in version control
+
+__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple
+    from typing import Union
+
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+    VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '0.0.50'
+__version_tuple__ = version_tuple = (0, 0, 50)
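`_version.py` is the module setuptools-scm writes at build time, so the installed wheel carries a frozen version string and tuple without needing git metadata at runtime. The commented-out alternative left in `__init__.py` would compute the same string dynamically; a sketch of that approach (only meaningful in a git checkout with `setuptools_scm` installed):

```python
# Sketch of the dynamic alternative referenced in __init__.py; it queries git
# through setuptools_scm instead of reading the generated _version.py.
from setuptools_scm import get_version

__version__ = get_version(root='..', relative_to=__file__)
print(__version__)
```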
datastock/version.py
CHANGED
@@ -1,2 +1,2 @@
 # Do not edit, pipeline versioning governed by git tags!
-__version__ = '0.0.
+__version__ = '0.0.49-1-g545b8a4'
{datastock-0.0.47.dist-info → datastock-0.0.50.dist-info}/METADATA
CHANGED
@@ -1,34 +1,36 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: datastock
-Version: 0.0.
-Summary:
-
-
-Author-email: didier.vezinet@gmail.com
+Version: 0.0.50
+Summary: Generic handler for multiple heterogenous numpy arrays and subclasses
+Author-email: Didier VEZINET <didier.vezinet@gmail.com>
+Maintainer-email: Didier VEZINET <didier.vezinet@gmail.com>
 License: MIT
-
-
+Project-URL: Homepage, https://github.com/ToFuProject/datastock
+Project-URL: Issues, https://github.com/ToFuProject/datastock/issues
+Keywords: data,analysis,interactive,heterogeneous arrays,numpy,Collection
+Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Science/Research
-Classifier:
-Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Natural Language :: English
-Requires-Python: >=3.
+Requires-Python: >=3.8
 Description-Content-Type: text/markdown
-License-File: LICENSE
+License-File: LICENSE.txt
 Requires-Dist: numpy
 Requires-Dist: scipy
 Requires-Dist: matplotlib
+Requires-Dist: PyQt5; platform_system != "Windows"
 Requires-Dist: astropy
-Provides-Extra:
-Requires-Dist:
-
-Requires-Dist:
-
-Requires-Dist: sphinx-gallery; extra == "dev"
-Requires-Dist: sphinx-bootstrap-theme; extra == "dev"
+Provides-Extra: linting
+Requires-Dist: ruff; extra == "linting"
+Provides-Extra: formatting
+Requires-Dist: ruff; extra == "formatting"
+Dynamic: license-file
 
 [![Conda]](https://anaconda.org/conda-forge/datastock)
 [![Conda]](https://anaconda.org/conda-forge/datastock)
@@ -73,15 +75,15 @@ Examples:
 Straightforward array visualization:
 ------------------------------------
 
-
+``
 import datastock as ds
 
 # any 1d, 2d or 3d array
-aa = np.
+aa = np.random((100, 100, 100))
 
 # plot interactive figure using shortcut to method
 dax = ds.plot_as_array(aa)
-
+``
 
 Now do **shift + left clic** on any axes, the rest of the interactive commands are automatically printed in your python console
 
@@ -107,7 +109,7 @@ Thanks to dref, the class knows the relationaships between all numpy arrays.
 In particular it knows which arrays share the same references / dimensions
 
 
-```
+```python
 import numpy as np
 import datastock as ds
 
@@ -128,24 +130,24 @@ lprof = [(1 + np.cos(t)[:, None]) * x[None, :] for t in lt]
 # Populate DataStock
 
 # instanciate
-
+coll = ds.DataStock()
 
 # add references (i.e.: store size of each dimension under a unique key)
-
-
+coll.add_ref(key='nc', size=nc)
+coll.add_ref(key='nx', size=nx)
 for ii, nt in enumerate(lnt):
-
+coll.add_ref(key=f'nt{ii}', size=nt)
 
 # add data dependening on these references
 # you can, optionally, specify units, physical dimensionality (ex: distance, time...), quantity (ex: radius, height, ...) and name (to your liking)
 
-
+coll.add_data(key='x', data=x, dimension='distance', quant='radius', units='m', ref='nx')
 for ii, nt in enumerate(lnt):
-
-
+coll.add_data(key=f't{ii}', data=lt[ii], dimension='time', units='s', ref=f'nt{ii}')
+coll.add_data(key=f'prof{ii}', data=lprof[ii], dimension='velocity', units='m/s', ref=(f'nt{ii}', 'x'))
 
 # print in the console the content of st
-
+coll
 ```
 
 <p align="center">
@@ -156,22 +158,22 @@ You can see that DataStock stores the relationships between each array and each
 Specifying explicitly the references is only necessary if there is an ambiguity (i.e.: several references have the same size, like nx and nt2 in our case)
 
 
-
+``
 # plot any array interactively
-dax =
-dax =
-dax =
-dax =
-
+dax = coll.plot_as_array('x')
+dax = coll.plot_as_array('t0')
+dax = coll.plot_as_array('prof0')
+dax = coll.plot_as_array('prof0', keyX='t0', keyY='x', aspect='auto')
+``
 
 You can then decide to store any object category
 Let's create a 'campaign' category to store the characteristics of each measurements campaign
 and let's add a 'campaign' parameter to each profile data
 
-
+``
 # add arbitrary object category as sub-dict of self.dobj
 for ii in range(nc):
-
+coll.add_obj(
 which='campaign',
 key=f'c{ii}',
 start_date=f'{ii}.04.2022',
@@ -182,16 +184,16 @@ for ii in range(nc):
 )
 
 # create new 'campaign' parameter for data arrays
-
+coll.add_param('campaign', which='data')
 
 # tag each data with its campaign
 for ii in range(nc):
-
-
+coll.set_param(which='data', key=f't{ii}', param='campaign', value=f'c{ii}')
+coll.set_param(which='data', key=f'prof{ii}', param='campaign', value=f'c{ii}')
 
 # print in the console the content of st
-
-
+coll
+``
 
 <p align="center">
 <img align="middle" src="https://github.com/ToFuProject/datastock/blob/devel/README_figures/DataStock_Obj.png" width="600" alt="Direct 3d array visualization"/>
@@ -200,31 +202,31 @@ st
 DataStock also provides built-in object selection method to allow return all
 objects matching a criterion, as lits of int indices, bool indices or keys.
 
-
-In [9]:
+``
+In [9]: coll.select(which='campaign', index=2, returnas=int)
 Out[9]: array([2])
 
 # list of 2 => return all matches inside the interval
-In [10]:
+In [10]: coll.select(which='campaign', index=[2, 4], returnas=int)
 Out[10]: array([2, 3, 4])
 
 # tuple of 2 => return all matches outside the interval
-In [11]:
+In [11]: coll.select(which='campaign', index=(2, 4), returnas=int)
 Out[11]: array([0, 1])
 
 # return as keys
-In [12]:
+In [12]: coll.select(which='campaign', index=(2, 4), returnas=str)
 Out[12]: array(['c0', 'c1'], dtype='<U2')
 
 # return as bool indices
-In [13]:
+In [13]: coll.select(which='campaign', index=(2, 4), returnas=bool)
 Out[13]: array([ True, True, False, False, False])
 
 # You can combine as many constraints as needed
-In [17]:
+In [17]: coll.select(which='campaign', index=[2, 4], operator='Barnaby', returnas=str)
 Out[17]: array(['c3', 'c4'], dtype='<U2')
 
-
+``
 
 You can also decide to sub-class DataStock to implement methods and visualizations specific to your needs
 
@@ -237,6 +239,6 @@ DataStock provides built-in methods like:
 - size is the total size of all data stored in the instance in bytes
 - dsize is a dict with the detail (size for each item in each sub-dict of the instance)
 * `save()`: will save the instance
-* `
+* `coll.load()`: will load a saved instance
 
 
{datastock-0.0.47.dist-info → datastock-0.0.50.dist-info}/RECORD
CHANGED
@@ -1,6 +1,5 @@
-_updateversion.py,sha256=OR6OJJozaHWzu7NWjKu5ERi0IyYqR61KrWvzf7kfoto,951
 datastock/_DataCollection_utils.py,sha256=hHf6HvGKMmM-psx3fj9QcY1TEmKrAtTdkRokH7SFqoo,7143
-datastock/__init__.py,sha256=
+datastock/__init__.py,sha256=LLPm4DAkXl5xYAQT4jUfQd5ZAmYx5w2nBSBmjN2XhdE,598
 datastock/_class.py,sha256=Az9PS3aSskiPMb1ekt78Y2ynBujYVc_cDjJxW9xH9g4,47
 datastock/_class0.py,sha256=je4ckTynK8lEGBa7OSURYZZ_-3XjzAtf6SazN3b-f5k,6028
 datastock/_class1.py,sha256=2PrIT26yRfNrP6YmZMpvb0_b0v_397Y9tOjzeVOxK_I,29313
@@ -12,13 +11,13 @@ datastock/_class1_domain.py,sha256=_xUCnwWJX5wPPYrpiGGrRwrPLmiO5BMm3nmYvn_YEOg,6
 datastock/_class1_interpolate.py,sha256=3VKGMDsiWFQUguMHxMaTQTyMhcN8Ikg1PmaH6TjIeLg,38348
 datastock/_class1_show.py,sha256=hqd-FeJ1NqiOzbrHzGMrwIo8_lLsjC199Zmw68NqkDQ,11745
 datastock/_class1_uniformize.py,sha256=dEJime_0SqmW8hX8ooZpHsPI_d8CIE9U9Yz9GhqsEUY,28433
-datastock/_class2.py,sha256=
+datastock/_class2.py,sha256=Mkqwhkl-YDAT0Wx5bHkUHwMW2L6feACkxmifvvzUnYk,45626
 datastock/_class2_interactivity.py,sha256=glAqqkIyH8JBVj0AsgJPNlaltcTIXB0iCqNBq3gKSb8,17198
 datastock/_class3.py,sha256=CH1oD_lTfVlcDp29L_iwzSfP78vX6_edDmZG9aSb1Ks,10848
 datastock/_direct_calls.py,sha256=EHFwI2mGMDqGz8_Bv2BseMBX4J8dSdE_RcNX3pt0ZYY,1801
 datastock/_export_dataframe.py,sha256=fy-uJR3EhDlHvd9ls1EQna_C8fyha1jCJLu1DTKTkdo,1576
 datastock/_find_plateau.py,sha256=sqnAuy0361DXkqBb_Lo1MmIGjn35tnKFvcv6MW6hifs,2685
-datastock/_generic_check.py,sha256=
+datastock/_generic_check.py,sha256=4NmG7iPa02bxOC5K-6qw63UahQ2FNDbeKu_AGBpNMtc,26717
 datastock/_generic_utils.py,sha256=_iV51SiujEmQfAlvyIEW4BvzIXdhPCD-vumV5DmUL44,23421
 datastock/_generic_utils_plot.py,sha256=xrWzeZFtdTAs-RO2DfpCRveJPqw_p4lRFtQuuAn1pD8,3709
 datastock/_plot_BvsA_as_distribution.py,sha256=fpRhlbv3Bk96buANC46Brc9hdLxkOAsoKpE5A9pohG0,15389
@@ -32,12 +31,15 @@ datastock/_plot_correlations.py,sha256=ITOypu_AEoKl0ihxocV-JVTXIHqut6p9TfG-xZmQy
 datastock/_plot_old_backup.py,sha256=XixTi2CiihKjtQP0TRycH0b25caWN1m35DgpsDeiWZE,21729
 datastock/_plot_text.py,sha256=wQPqjfpLyIioS2JeOt3E9C9HgYUJ49YEoOgRuKYvAR8,3143
 datastock/_saveload.py,sha256=1vAMp27KfqXbo5b_Pi8hJux0stsHq6dO5vy8k1d4_iA,14141
-datastock/
+datastock/_version.py,sha256=E35Bq14aHx0dG_mA8HPiJoVRTCMU_V3unyyjRET8Kww,513
+datastock/version.py,sha256=VJcnSK0zygwEhh_p6KIoC9yiMkp0bs-SJgLCbyubs9o,91
 datastock/tests/__init__.py,sha256=teOo2xP0IO7PQMuMDmum61XVHe2TuxW3BiHiL73X8jQ,35
+datastock/tests/prepublish.py,sha256=3gSr3nRhEl5qsmhxJ-nEoePvbINYq0EvcZCfxfUVHBM,83
 datastock/tests/test_01_DataStock.py,sha256=aUseXH2zYnFtNDJSCuEROgPxfKKNeLCkCQSR--_Fheg,19176
 datastock/tests/output/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datastock-0.0.
-datastock-0.0.
-datastock-0.0.
-datastock-0.0.
-datastock-0.0.
+datastock-0.0.50.dist-info/licenses/LICENSE.txt,sha256=SM-QOA3JwR1e0RVBGb1HhPmc5yD1Gsl1CRPnMSKJhH8,1068
+datastock-0.0.50.dist-info/METADATA,sha256=FLc5tLiMeT7uqPs9_kNV3tZNALar_DErqHZeO0uql68,8893
+datastock-0.0.50.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+datastock-0.0.50.dist-info/entry_points.txt,sha256=GqmxVVp9G2ulEDaS9gLpwSVOBLF_FEBlj8k5Z6TJKsc,42
+datastock-0.0.50.dist-info/top_level.txt,sha256=0HeA0gZ4G1IKtkPhmqijZRYH9hID6LKLQskeSjAna8g,10
+datastock-0.0.50.dist-info/RECORD,,
datastock-0.0.50.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
+datastock
_updateversion.py
DELETED
@@ -1,32 +0,0 @@
-#!/usr/bin/env/python
-# coding=utf-8
-
-
-import os
-import subprocess
-
-
-_HERE = os.path.abspath(os.path.dirname(__file__))
-
-
-def updateversion(path=_HERE):
-
-    # Fetch version from git tags, and write to version.py
-    # Also, when git is not available (PyPi package), use stored version.py
-    version_py = os.path.join(path, 'datastock', 'version.py')
-    try:
-        version_git = subprocess.check_output(
-            ["git", "describe"]
-        ).rstrip().decode()
-
-    except subprocess.CalledProcessError:
-        with open(version_py, 'r') as fh:
-            version_git = fh.read().strip().split("=")[-1].replace("'", '')
-
-    version_git = version_git.lower().replace('v', '').replace(' ', '')
-
-    version_msg = "# Do not edit, pipeline versioning governed by git tags!"
-    with open(version_py, "w") as fh:
-        msg = "{0}__version__ = '{1}'{0}".format(os.linesep, version_git)
-        fh.write(version_msg + msg)
-    return version_git
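The deleted `_updateversion.py` was the old versioning hook: it ran `git describe` (falling back to the stored file when git was unavailable), normalized the result, and rewrote `datastock/version.py`. With setuptools-scm now generating `_version.py` at build time, the helper is redundant. A sketch of the normalization it applied, using the tag string visible in the `version.py` diff above:

```python
# Sketch of the normalization the deleted helper applied to `git describe`
# output; the input string matches the one written to version.py in this diff.
version_git = "v0.0.49-1-g545b8a4"
version_git = version_git.lower().replace('v', '').replace(' ', '')
print(version_git)  # -> 0.0.49-1-g545b8a4
```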