datastock-0.0.44-py3-none-any.whl → datastock-0.0.46-py3-none-any.whl

This diff shows the content changes between two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
datastock/_class1.py CHANGED
@@ -21,6 +21,7 @@ from . import _class1_domain
21
21
  from . import _class1_binning
22
22
  from . import _class1_interpolate
23
23
  from . import _class1_uniformize
24
+ from . import _class1_color_touch as _color_touch
24
25
  from . import _export_dataframe
25
26
  from . import _find_plateau
26
27
 
@@ -923,6 +924,32 @@ class DataStock1(DataStock0):
923
924
  inplace=inplace,
924
925
  )
925
926
 
927
+ # ---------------------
928
+ # color touch array
929
+ # ---------------------
930
+
931
+ def get_color_touch(
932
+ self,
933
+ data=None,
934
+ dcolor=None,
935
+ # options
936
+ color_default=None,
937
+ vmin=None,
938
+ vmax=None,
939
+ log=None,
940
+ ):
941
+
942
+ return _color_touch.main(
943
+ coll=self,
944
+ data=data,
945
+ dcolor=dcolor,
946
+ # options
947
+ color_default=color_default,
948
+ vmin=vmin,
949
+ vmax=vmax,
950
+ log=log,
951
+ )
952
+
926
953
  # ---------------------
927
954
  # Methods computing correlations
928
955
  # ---------------------
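Usage sketch for the new method (illustrative, not part of the diff; it mirrors the new test06_get_dcolor_touch further down, and assumes the top-level DataStock export used by the tests):

    import numpy as np
    from datastock import DataStock   # assumed top-level export, as in the tests

    st = DataStock()

    # 2d gaussian and a boolean mask flagging the "touched" region
    xx = np.arange(50)
    aa = np.exp(-(xx[:, None] - 25)**2 / 10**2 - (xx[None, :] - 25)**2 / 10**2)
    ind = (aa > 0.3) & (np.arange(50)[None, :] > 25)

    dcolor = st.get_color_touch(
        aa,
        dcolor={'foo': {'ind': ind, 'color': 'r'}},
    )

    # per the new module and test:
    #  - dcolor['color'] is an RGBA array of shape aa.shape + (4,)
    #  - dcolor['meaning'] maps each rgb tuple to the dcolor keys that use it,
    #    e.g. dcolor['meaning'][(1.0, 0.0, 0.0)] == ['foo']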
datastock/_class1_color_touch.py ADDED
@@ -0,0 +1,269 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Fri Feb 28 08:53:00 2025
4
+
5
+ @author: dvezinet
6
+ """
7
+
8
+
9
+ import numpy as np
10
+ import matplotlib.pyplot as plt
11
+ import matplotlib.colors as mcolors
12
+ import datastock as ds
13
+
14
+
15
+ # ###############################################################
16
+ # ###############################################################
17
+ # Main
18
+ # ###############################################################
19
+
20
+
21
+ def main(
22
+ coll=None,
23
+ data=None,
24
+ dcolor=None,
25
+ # options
26
+ color_default=None,
27
+ vmin=None,
28
+ vmax=None,
29
+ log=None,
30
+ ):
31
+
32
+ # ------------------
33
+ # check inputs
34
+ # ------------------
35
+
36
+ data, dcolor, color_default, vmin, vmax, log = _check(
37
+ coll=coll,
38
+ data=data,
39
+ dcolor=dcolor,
40
+ color_default=color_default,
41
+ vmin=vmin,
42
+ vmax=vmax,
43
+ log=log,
44
+ )
45
+
46
+ # ------------------
47
+ # initialize
48
+ # ------------------
49
+
50
+ shape = data.shape + (4,)
51
+ color = np.zeros(shape, dtype=float)
52
+
53
+ # ------------------
54
+ # compute - alpha
55
+ # ------------------
56
+
57
+ if log is True:
58
+ vmin = np.log10(vmin)
59
+ vmax = np.log10(vmax)
60
+
61
+ alpha = (np.log10(data) - vmin) / (vmax - vmin)
62
+
63
+ else:
64
+ alpha = (data - vmin) / (vmax - vmin)
65
+
66
+ # ------------------
67
+ # compute - colors
68
+ # ------------------
69
+
70
+ for k0, v0 in dcolor.items():
71
+
72
+ sli = (v0['ind'], slice(0, 3))
73
+ color[sli] = v0['color']
74
+
75
+ sli = tuple([slice(None) for ii in range(data.ndim)] + [-1])
76
+ color[sli] = alpha
77
+
78
+ # ------------------
79
+ # output
80
+ # ------------------
81
+
82
+ lcol = set([v0['color'] for v0 in dcolor.values()])
83
+ dcolor = {
84
+ 'color': color,
85
+ 'meaning': {
86
+ kc: [k0 for k0, v0 in dcolor.items() if v0['color'] == kc]
87
+ for kc in lcol
88
+ },
89
+ }
90
+
91
+ return dcolor
92
+
93
+
94
+ # ###############################################################
95
+ # ###############################################################
96
+ # check
97
+ # ###############################################################
98
+
99
+
100
+ def _check(
101
+ coll=None,
102
+ data=None,
103
+ dcolor=None,
104
+ # options
105
+ color_default=None,
106
+ vmin=None,
107
+ vmax=None,
108
+ log=None,
109
+ ):
110
+
111
+ # ------------------
112
+ # data
113
+ # ------------------
114
+
115
+ lc = [
116
+ isinstance(data, np.ndarray),
117
+ isinstance(data, str) and data in coll.ddata.keys(),
118
+ ]
119
+ if lc[0]:
120
+ pass
121
+ elif lc[1]:
122
+ data = coll.ddata[data]['data']
123
+ else:
124
+ msg = (
125
+ "Arg data must be a np.ndarray or a key to an existing data!\n"
126
+ f"Provided: {data}\n"
127
+ )
128
+ raise Exception(msg)
129
+
130
+
131
+ # ------------------
132
+ # dcolor
133
+ # ------------------
134
+
135
+ # --------------------
136
+ # dcolor format check
137
+
138
+ c0 = (
139
+ isinstance(dcolor, dict)
140
+ and all([
141
+ isinstance(k0, str)
142
+ and isinstance(v0, dict)
143
+ and sorted(v0.keys()) == ['color', 'ind']
144
+ for k0, v0 in dcolor.items()
145
+ ])
146
+ )
147
+ if not c0:
148
+ msg = (
149
+ "Arg dcolor must be a dict of sub-dicts of shape:\n"
150
+ "\t- 'key0': {'ind': ..., 'color': ...}\n"
151
+ "\t- ...\n"
152
+ "\t- 'keyN': {'ind': ..., 'color': ...}\n"
153
+ f"Provided:\n{dcolor}\n"
154
+ )
155
+ raise Exception(msg)
156
+
157
+ # --------------------
158
+ # ind and color checks
159
+
160
+ dfail = {}
161
+ shape = data.shape
162
+ for k0, v0 in dcolor.items():
163
+
164
+ c0 = (
165
+ isinstance(v0['ind'], np.ndarray)
166
+ and v0['ind'].shape == data.shape
167
+ and v0['ind'].dtype == bool
168
+ )
169
+ if not c0:
170
+ msg = f"'ind' must be a {shape} bool array, not {v0['ind']}"
171
+ dfail[k0] = (msg,)
172
+
173
+ if not mcolors.is_color_like(v0['color']):
174
+ msg = f"'color' must be color-like, not {v0['color']}"
175
+ if k0 in dfail:
176
+ dfail[k0] = dfail[k0] + (msg,)
177
+ else:
178
+ dfail[k0] = (msg,)
179
+
180
+ # raise exception
181
+ if len(dfail) > 0:
182
+ lmax = np.max([len(f"\t- {k0}: ") for k0 in dfail.keys()])
183
+ lstr = [
184
+ f"\t- {k0}:\n".ljust(lmax) + '\n'.join([
185
+ "".ljust(lmax+4) + f"\t- {v1}".rjust(lmax)
186
+ for ii, v1 in enumerate(v0)
187
+ ])
188
+ for k0, v0 in dfail.items()
189
+ ]
190
+ msg = (
191
+ "Arg dcolor, the following keys have incorrect keys / values:\n"
192
+ + "\n".join(lstr)
193
+ )
194
+ raise Exception(msg)
195
+
196
+ # ----------------------
197
+ # format colors to rgb
198
+
199
+ dcol = {}
200
+ for k0, v0 in dcolor.items():
201
+ if np.any(v0['ind']):
202
+ dcol[k0] = {
203
+ 'ind': v0['ind'],
204
+ 'color': mcolors.to_rgb(v0['color']),
205
+ }
206
+
207
+ # ------------------
208
+ # color_default
209
+ # ------------------
210
+
211
+ if color_default is None:
212
+ color_default = 'k'
213
+ if not mcolors.is_color_like(color_default):
214
+ msg = (
215
+ "Arg color_default must be color-like!\n"
216
+ f"Provided: {color_default}\n"
217
+ )
218
+ raise Exception(msg)
219
+
220
+ color_default = mcolors.to_rgb(color_default)
221
+
222
+ # ------------------
223
+ # vmin, vmax
224
+ # ------------------
225
+
226
+ vmin0 = np.nanmin(data)
227
+ vmax0 = np.nanmax(data)
228
+
229
+ # vmin
230
+ if vmin is None:
231
+ vmin = vmin0
232
+ c0 = (np.isscalar(vmin) and np.isfinite(vmin) and vmin < vmax0)
233
+ if not c0:
234
+ msg = (
235
+ f"Arg vmin must be a finite scalar below max ({vmax0})\n"
236
+ f"Provided: {vmin}\n"
237
+ )
238
+ raise Exception(msg)
239
+
240
+ # vmax
241
+ if vmax is None:
242
+ vmax = vmax0
243
+ c0 = (np.isscalar(vmax) and np.isfinite(vmax) and vmax > vmin0)
244
+ if not c0:
245
+ msg = (
246
+ f"Arg vmax must be a finite scalar above min ({vmin0})\n"
247
+ f"Provided: {vmax}\n"
248
+ )
249
+ raise Exception(msg)
250
+
251
+ # ordering
252
+ if vmin >= vmax:
253
+ msg = (
254
+ "Arg vmin must be below vmax!\n"
255
+ f"Provided:\n\t- vmin = {vmin}\n\t- vmax = {vmax}\n"
256
+ )
257
+ raise Exception(msg)
258
+
259
+ # ------------------
260
+ # log
261
+ # ------------------
262
+
263
+ log = ds._generic_check._check_var(
264
+ log, 'log',
265
+ types=bool,
266
+ default=False,
267
+ )
268
+
269
+ return data, dcol, color_default, vmin, vmax, log
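For reference, a stand-alone sketch of the RGBA construction performed by main() above (assuming log=False and a single mask; displaying the result with imshow is one possible use, not something this module itself does):

    import numpy as np
    import matplotlib.pyplot as plt
    import matplotlib.colors as mcolors

    data = np.linspace(0., 1., 25).reshape(5, 5)
    vmin, vmax = np.nanmin(data), np.nanmax(data)

    color = np.zeros(data.shape + (4,), dtype=float)
    color[data > 0.5, :3] = mcolors.to_rgb('r')        # rgb where "touched"
    color[..., -1] = (data - vmin) / (vmax - vmin)     # linear alpha (log=False)

    plt.imshow(color)   # an (M, N, 4) float array is directly displayable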
datastock/_class1_compute.py CHANGED
@@ -1233,4 +1233,4 @@ def _extract_select(
1233
1233
  # lkey=[idq2dR],
1234
1234
  # return_all=True,
1235
1235
  # )
1236
- # return out
1236
+ # return out
datastock/_class1_domain.py CHANGED
@@ -30,11 +30,14 @@ def domain_ref(
30
30
  # -----------
31
31
  # get indices
32
32
 
33
- lvectu = sorted({v0['vect'] for v0 in domain.values()})
33
+ lvectu = sorted({
34
+ v0['vect'] for v0 in domain.values()
35
+ if v0.get('vect') is not None
36
+ })
34
37
 
35
38
  for vv in lvectu:
36
39
 
37
- lk0 = [k0 for k0, v0 in domain.items() if v0['vect'] == vv]
40
+ lk0 = [k0 for k0, v0 in domain.items() if v0.get('vect') == vv]
38
41
  for k0 in lk0:
39
42
 
40
43
  if domain[k0].get('domain') is None:
@@ -61,12 +64,14 @@ def _check(
61
64
 
62
65
  # ---------
63
66
  # prepare
67
+ # ---------
64
68
 
65
69
  ldata = list(coll.ddata.keys())
66
70
  lref = list(coll.dref.keys())
67
71
 
68
72
  # ------------
69
73
  # domain
74
+ # ------------
70
75
 
71
76
  c0 = (
72
77
  isinstance(domain, dict)
@@ -80,24 +85,17 @@ def _check(
80
85
  )
81
86
  raise Exception(msg)
82
87
 
83
- # ------------
88
+ # --------------
84
89
  # check each key
90
+ # --------------
85
91
 
86
92
  dfail = {}
87
93
  domain = copy.deepcopy(domain)
88
94
  for k0, v0 in domain.items():
89
95
 
90
- # check ref vector
91
- kwd = {'ref': k0} if k0 in lref else {'key0': k0}
92
- hasref, hasvect, ref, vect = coll.get_ref_vector(**kwd)[:4]
93
- if not (hasref and ref is not None):
94
- dfail[k0] = "No associated ref identified!"
95
- continue
96
- if not (hasvect and vect is not None):
97
- dfail[k0] = "No associated ref vector identified!"
98
- continue
99
-
96
+ # -----------
100
97
  # v0 is dict
98
+
101
99
  ltyp = (list, tuple, np.ndarray)
102
100
  if isinstance(v0, ltyp):
103
101
  domain[k0] = {'domain': v0}
@@ -106,21 +104,42 @@ def _check(
106
104
 
107
105
  c0 = (
108
106
  isinstance(domain[k0], dict)
109
- and any(ss in ['ind', 'domain'] for ss in domain[k0].keys())
107
+ and any([ss in ['ind', 'domain'] for ss in domain[k0].keys()])
110
108
  and (
111
109
  isinstance(domain[k0].get('domain'), ltyp)
112
110
  or np.isscalar(domain[k0].get('domain', 0))
113
111
  )
114
- and isinstance(domain[k0].get('ind', np.r_[0]), np.ndarray)
112
+ and isinstance(domain[k0].get('ind', np.r_[0]), (np.ndarray, int))
115
113
  )
114
+
116
115
  if not c0:
117
116
  dfail[k0] = "must be a dict with keys ['ind', 'domain']"
118
117
  continue
119
118
 
119
+ # ----------------
120
+ # check ref vector
121
+
122
+ kwd = {'ref': k0} if k0 in lref else {'key0': k0}
123
+ hasref, hasvect, ref, vect = coll.get_ref_vector(**kwd)[:4]
124
+
125
+ if not (hasref and ref is not None):
126
+ dfail[k0] = "No associated ref identified!"
127
+ continue
128
+
120
129
  # vect
121
- domain[k0]['vect'] = vect
130
+ domain[k0]['ref'] = ref
122
131
 
132
+ if domain[k0].get('domain') is not None:
133
+ if not (hasvect and vect is not None):
134
+ dfail[k0] = "No associated ref vector identified!"
135
+ continue
136
+
137
+ # vect
138
+ domain[k0]['vect'] = vect
139
+
140
+ # -------
123
141
  # domain
142
+
124
143
  dom = domain[k0].get('domain')
125
144
  if dom is not None:
126
145
  dom, err = _check_domain(dom)
@@ -129,10 +148,15 @@ def _check(
129
148
  continue
130
149
  domain[k0]['domain'] = dom
131
150
 
151
+ # -----
132
152
  # ind
153
+
133
154
  ind = domain[k0].get('ind')
134
155
  if ind is not None:
135
- vsize = coll.ddata[vect]['data'].size
156
+ if np.isscalar(ind):
157
+ ind = np.array([ind], dtype=int)
158
+
159
+ vsize = coll.dref[ref]['size']
136
160
  if ind.dtype == bool:
137
161
  pass
138
162
  elif 'int' in ind.dtype.name:
@@ -151,12 +175,14 @@ def _check(
151
175
 
152
176
  # -----------
153
177
  # errors
178
+ # -----------
154
179
 
155
180
  if len(dfail) > 0:
156
181
  lstr = [f"\t- '{k0}': {v0}" for k0, v0 in dfail.items()]
157
182
  msg = (
158
183
  "The following domain keys / values are not conform:\n"
159
184
  + "\n".join(lstr)
185
+ + f"\nProvided:\n{domain}"
160
186
  )
161
187
  raise Exception(msg)
162
188
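Domain specification sketch reflecting the changes above (illustrative, not part of the diff; ref names 'nx' and 'nt0' are taken from the test file, and `coll` is assumed to be a DataStock instance holding those refs and their ref vectors):

    import numpy as np

    domain = {
        # interval on the associated ref vector => a ref vector is required
        'nx': [1.5, 2],
        # index-only selection, now also accepted as a plain scalar
        # (handled internally as a 1-element int array, no ref vector needed)
        'nt0': {'ind': 0},
    }
    dout = coll.get_domain_ref(domain)
    # each dout entry now carries 'ref'; 'vect' is only attached
    # when a 'domain' interval was provided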
 
datastock/_class1_interpolate.py CHANGED
@@ -88,6 +88,7 @@ def interpolate(
88
88
  ddata, dout, dsh_other, sli_c, sli_x, sli_v,
89
89
  log_log, nan0, grid, ndim, xunique,
90
90
  returnas, return_params, store, inplace,
91
+ domain,
91
92
  ) = _check(
92
93
  coll=coll,
93
94
  # interpolation base
@@ -143,7 +144,7 @@ def interpolate(
143
144
  # adjust data and ref if xunique
144
145
 
145
146
  if xunique:
146
- _xunique(dout)
147
+ _xunique(dout, domain=domain)
147
148
 
148
149
  # --------
149
150
  # store
@@ -392,9 +393,9 @@ def _check(
392
393
  )
393
394
 
394
395
  # ---------------------
395
- # get dvect from domain
396
+ # get dref_dom from domain
396
397
 
397
- domain, dvect = _get_dvect(
398
+ domain, dref_dom = _get_drefdom(
398
399
  coll=coll,
399
400
  domain=domain,
400
401
  ref_key=ref_key,
@@ -407,7 +408,7 @@ def _check(
407
408
  coll=coll,
408
409
  keys=keys,
409
410
  ref_key=ref_key,
410
- dvect=dvect,
411
+ dref_dom=dref_dom,
411
412
  )
412
413
 
413
414
  # --------
@@ -422,7 +423,7 @@ def _check(
422
423
  )
423
424
 
424
425
  if ref_com is not None and domain is not None:
425
- if ref_com in [coll.ddata[k0]['ref'][0] for k0 in dvect.keys()]:
426
+ if ref_com in list(dref_dom.keys()):
426
427
  msg = (
427
428
  "Arg ref_com and domain cannot be applied to the same ref!\n"
428
429
  f"\t- ref_com: {ref_com}\n"
@@ -440,8 +441,10 @@ def _check(
440
441
  x0=x0,
441
442
  daxis=daxis,
442
443
  dunits=dunits,
444
+ # ref com
443
445
  dref_com=dref_com,
444
- dvect=dvect,
446
+ # domain
447
+ dref_dom=dref_dom,
445
448
  )
446
449
 
447
450
  # --------------
@@ -488,6 +491,7 @@ def _check(
488
491
  ddata, dout, dsh_other, sli_c, sli_x, sli_v,
489
492
  log_log, nan0, grid, ndim, xunique,
490
493
  returnas, return_params, store, inplace,
494
+ domain,
491
495
  )
492
496
 
493
497
 
@@ -963,56 +967,59 @@ def _x01_grid(
963
967
  return x0, x1, refx, ix, xunique
964
968
 
965
969
 
966
- def _get_dvect(
970
+ def _get_drefdom(
967
971
  coll=None,
968
972
  domain=None,
969
973
  ref_key=None,
970
974
  ):
971
975
  # ----------------
972
- # domain => dvect
976
+ # domain => dref_dom
977
+
978
+ lr_ref_key = [coll.ddata[kk]['ref'][0] for kk in ref_key]
973
979
 
974
980
  if domain is not None:
975
981
 
976
982
  # get domain
977
983
  domain = coll.get_domain_ref(domain)
978
984
 
979
- # derive dvect
980
- lvectu = sorted({
981
- v0['vect'] for v0 in domain.values() if v0['vect'] not in ref_key
985
+ # derive lrefu
986
+ lrefu = sorted({
987
+ v0['ref'] for v0 in domain.values() if v0['ref'] not in lr_ref_key
982
988
  })
983
989
 
984
- dvect = {
985
- k0: [k1 for k1, v1 in domain.items() if v1['vect'] == k0]
986
- for k0 in lvectu
990
+ # derive dref_dom
991
+ dref_dom = {
992
+ rr: [k1 for k1, v1 in domain.items() if v1['ref'] == rr]
993
+ for rr in lrefu
987
994
  }
988
995
 
989
996
  # check unicity of vect
990
- dfail = {k0: v0 for k0, v0 in dvect.items() if len(v0) > 1}
997
+ dfail = {k0: v0 for k0, v0 in dref_dom.items() if len(v0) > 1}
991
998
  if len(dfail) > 0:
992
999
  lstr = [f"\t- '{k0}': {v0}" for k0, v0 in dfail.items()]
993
1000
  msg = (
994
- "Some ref vector have been specified with multiple domains!\n"
1001
+ "Some ref have been specified with multiple domains!\n"
995
1002
  + "\n".join(lstr)
996
1003
  )
997
1004
  raise Exception(msg)
998
1005
 
999
- # build final dvect
1000
- dvect = {
1006
+ # build final dref_dom
1007
+ dref_dom = {
1001
1008
  k0: domain[v0[0]]['ind']
1002
- for k0, v0 in dvect.items()
1009
+ for k0, v0 in dref_dom.items()
1003
1010
  }
1004
1011
 
1005
1012
  else:
1006
- dvect = None
1013
+ dref_dom = None
1007
1014
 
1008
- return domain, dvect
1015
+ return domain, dref_dom
1009
1016
 
1010
1017
 
1011
1018
  def _get_ddata(
1012
1019
  coll=None,
1013
1020
  keys=None,
1014
1021
  ref_key=None,
1015
- dvect=None,
1022
+ dref_dom=None,
1016
1023
  ):
1017
1024
 
1018
1025
  # --------
@@ -1024,13 +1031,12 @@ def _get_ddata(
1024
1031
  data = coll.ddata[k0]['data']
1025
1032
 
1026
1033
  # apply domain
1027
- if dvect is not None:
1028
- for k1, v1 in dvect.items():
1029
- ref = coll.ddata[k1]['ref'][0]
1030
- if ref in coll.ddata[k0]['ref']:
1031
- ax = coll.ddata[k0]['ref'].index(ref)
1034
+ if dref_dom is not None:
1035
+ for rr, vr in dref_dom.items():
1036
+ if rr in coll.ddata[k0]['ref']:
1037
+ ax = coll.ddata[k0]['ref'].index(rr)
1032
1038
  sli = tuple([
1033
- v1 if ii == ax else slice(None)
1039
+ vr if ii == ax else slice(None)
1034
1040
  for ii in range(data.ndim)
1035
1041
  ])
1036
1042
  data = data[sli]
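The per-ref domain slicing above, as a stand-alone sketch (shapes and the 'nt0'/'nx' ref names are made up for illustration):

    import numpy as np

    data = np.arange(50).reshape(10, 5)
    ref = ('nt0', 'nx')
    dref_dom = {'nt0': np.array([0, 2, 5])}   # indices kept along ref 'nt0'

    for rr, vr in dref_dom.items():
        if rr in ref:
            ax = ref.index(rr)
            sli = tuple(
                vr if ii == ax else slice(None)
                for ii in range(data.ndim)
            )
            data = data[sli]

    assert data.shape == (3, 5)   # the 'nt0' axis was restricted by the domain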
@@ -1050,7 +1056,7 @@ def _get_dout(
1050
1056
  # common refs
1051
1057
  dref_com=None,
1052
1058
  # domain
1053
- dvect=None,
1059
+ dref_dom=None,
1054
1060
  ):
1055
1061
 
1056
1062
  # -------------
@@ -1069,11 +1075,11 @@ def _get_dout(
1069
1075
  rd = list(coll.ddata[k0]['ref'])
1070
1076
 
1071
1077
  # apply domain
1072
- if dvect is not None:
1073
- for k1, v1 in dvect.items():
1074
- if coll.ddata[k1]['ref'][0] in rd:
1075
- ax = rd.index(coll.ddata[k1]['ref'][0])
1076
- sh[ax] = len(v1) if v1.dtype == int else v1.sum()
1078
+ if dref_dom is not None:
1079
+ for rr, vr in dref_dom.items():
1080
+ if rr in rd:
1081
+ ax = rd.index(rr)
1082
+ sh[ax] = len(vr) if vr.dtype == int else vr.sum()
1077
1083
  rd[ax] = None
1078
1084
 
1079
1085
  # ------------------------
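A short note on the shape bookkeeping above: along a domain-restricted axis, the output size is the number of selected indices (integer indexing) or the number of True values (boolean mask), and the corresponding ref entry is set to None (rd[ax] = None). For instance:

    import numpy as np

    vr_int = np.array([0, 2, 5])                    # int indices -> 3 elements kept
    vr_bool = np.array([True, False, True, True])   # bool mask   -> vr_bool.sum() == 3
    assert len(vr_int) == 3 and vr_bool.sum() == 3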
@@ -1556,7 +1562,7 @@ def _interp2d(
1556
1562
  # ###############################################################
1557
1563
 
1558
1564
 
1559
- def _xunique(dout=None):
1565
+ def _xunique(dout=None, domain=None):
1560
1566
  """ interpolation on a single point => eliminates a ref """
1561
1567
 
1562
1568
  # ----------
@@ -1567,13 +1573,18 @@ def _xunique(dout=None):
1567
1573
  for k0, v0 in dout.items()
1568
1574
  }
1569
1575
 
1570
- dwrong = {k0: v0 for k0, v0 in dind.items() if len(v0) != 1}
1576
+ # Number of Nones expected
1577
+ ndom = 0 if domain is None else len(domain)
1578
+ nNone = 1 + ndom
1579
+
1580
+ # check
1581
+ dwrong = {k0: v0 for k0, v0 in dind.items() if len(v0) != nNone}
1571
1582
  if len(dwrong) > 0:
1572
1583
  lstr = [
1573
1584
  f"\t- {k0}: {dout[k0]['ref']} => {v0}" for k0, v0 in dwrong.items()
1574
1585
  ]
1575
1586
  msg = (
1576
- "Interpolation at unique point => ref should have one None:\n"
1587
+ "Interpolate unique pt => ref should have nNone = 1 + {ndom}:\n"
1577
1588
  + "\n".join(lstr)
1578
1589
  )
1579
1590
  raise Exception(msg)
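Sketch of the updated consistency check: after interpolation at a unique point, each output ref tuple is expected to contain one None for the interpolated ref plus one None per domain-restricted ref (the values below are hypothetical):

    # hypothetical output ref tuple: interpolated axis + 1 domain-restricted axis
    ref = ('nt0', None, None)
    domain = {'nx': {'ind': [0, 2]}}     # 1 domain entry => ndom = 1

    ndom = 0 if domain is None else len(domain)
    nNone = 1 + ndom
    ind_none = [ii for ii, rr in enumerate(ref) if rr is None]
    assert len(ind_none) == nNone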
@@ -1616,7 +1627,12 @@ def _store(
1616
1627
  ldata = list(set(itt.chain.from_iterable([
1617
1628
  v0['ref'] for v0 in dout.values()
1618
1629
  ])))
1619
- coll2 = coll.extract(keys=ldata, vectors=True)
1630
+
1631
+ coll2 = coll.extract(
1632
+ keys=ldata,
1633
+ inc_vectors=True,
1634
+ return_keys=False,
1635
+ )
1620
1636
 
1621
1637
  # -------------
1622
1638
  # store_keys
@@ -1634,7 +1650,13 @@ def _store(
1634
1650
  excluded=lout,
1635
1651
  )
1636
1652
 
1637
- assert len(store_keys) == len(dout)
1653
+ if len(store_keys) != len(dout):
1654
+ msg = (
1655
+ "Nb of store_keys != nb of keys in dout!\n"
1656
+ f"\t- store_keys:\n{store_keys}\n "
1657
+ f"\t- dout.keys():\n{sorted(dout.keys())}\n "
1658
+ )
1659
+ raise Exception(msg)
1638
1660
 
1639
1661
  # ---------
1640
1662
  # add data
@@ -1648,4 +1670,4 @@ def _store(
1648
1670
  units=v0['units'],
1649
1671
  )
1650
1672
 
1651
- return coll2
1673
+ return coll2
datastock/tests/test_01_DataStock.py CHANGED
@@ -306,11 +306,26 @@ class Test02_Manipulate():
306
306
  self.st.show_data()
307
307
  self.st.show_obj()
308
308
 
309
+ # ------------------------
310
+ # dcolor
311
+ # ------------------------
312
+
313
+ def test06_get_dcolor_touch(self):
314
+ xx = np.arange(50)
315
+ aa = np.exp(-(xx[:, None]-25)**2/10**2 - (xx[None, :]-25)**2/10**2)
316
+ ind = (aa>0.3) & (np.arange(50)[None, :] > 25)
317
+ dcolor = self.st.get_color_touch(
318
+ aa,
319
+ dcolor={'foo': {'ind': ind, 'color': 'r'}}
320
+ )
321
+ assert dcolor['color'].shape == aa.shape + (4,)
322
+ assert dcolor['meaning'][(1.0, 0.0, 0.0)] == ['foo']
323
+
309
324
  # ------------------------
310
325
  # Interpolate
311
326
  # ------------------------
312
327
 
313
- def test06_get_ref_vector(self):
328
+ def test07_get_ref_vector(self):
314
329
  (
315
330
  hasref, hasvector,
316
331
  ref, key_vector,
@@ -325,13 +340,13 @@ class Test02_Manipulate():
325
340
  assert values.size == dind['ind'].size == 4
326
341
  assert dind['indr'].shape == (2, 4)
327
342
 
328
- def test07_get_ref_vector_common(self):
343
+ def test08_get_ref_vector_common(self):
329
344
  hasref, ref, key, val, dout = self.st.get_ref_vector_common(
330
345
  keys=['t0', 'prof0', 'prof1', 't3'],
331
346
  dim='time',
332
347
  )
333
348
 
334
- def test08_domain_ref(self):
349
+ def test09_domain_ref(self):
335
350
 
336
351
  domain = {
337
352
  'nx': [1.5, 2],
@@ -347,7 +362,7 @@ class Test02_Manipulate():
347
362
  lk = list(domain.keys())
348
363
  assert all([isinstance(dout[k0]['ind'], np.ndarray) for k0 in lk])
349
364
 
350
- def test09_binning(self):
365
+ def test10_binning(self):
351
366
 
352
367
  bins = np.linspace(1, 5, 8)
353
368
  lk = [
@@ -399,7 +414,7 @@ class Test02_Manipulate():
399
414
  )
400
415
  raise Exception(msg)
401
416
 
402
- def test10_interpolate(self):
417
+ def test11_interpolate(self):
403
418
 
404
419
  lk = ['y', 'y', 'prof0', 'prof0', 'prof0', '3d']
405
420
  lref = [None, 'nx', 't0', ['nt0', 'nx'], ['t0', 'x'], ['t0', 'x']]
@@ -443,7 +458,7 @@ class Test02_Manipulate():
443
458
  msg = str(dout[kk]['data'].shape, shape, kk, rr)
444
459
  raise Exception(msg)
445
460
 
446
- def test11_interpolate_common_refs(self):
461
+ def test12_interpolate_common_refs(self):
447
462
  lk = ['3d', '3d', '3d']
448
463
  lref = ['t0', ['nt0', 'nx'], ['nx']]
449
464
  lrefc = ['nc', 'nc', 'nt0']
@@ -519,17 +534,17 @@ class Test02_Manipulate():
519
534
  # Plotting
520
535
  # ------------------------
521
536
 
522
- def test12_plot_as_array_1d(self):
537
+ def test13_plot_as_array_1d(self):
523
538
  dax = self.st.plot_as_array(key='t0')
524
539
  plt.close('all')
525
540
  del dax
526
541
 
527
- def test13_plot_as_array_2d(self):
542
+ def test14_plot_as_array_2d(self):
528
543
  dax = self.st.plot_as_array(key='prof0')
529
544
  plt.close('all')
530
545
  del dax
531
546
 
532
- def test14_plot_as_array_2d_log(self):
547
+ def test15_plot_as_array_2d_log(self):
533
548
  dax = self.st.plot_as_array(
534
549
  key='pec', keyX='ne', keyY='Te',
535
550
  dscale={'data': 'log'},
@@ -537,17 +552,17 @@ class Test02_Manipulate():
537
552
  plt.close('all')
538
553
  del dax
539
554
 
540
- def test15_plot_as_array_3d(self):
555
+ def test16_plot_as_array_3d(self):
541
556
  dax = self.st.plot_as_array(key='3d', dvminmax={'keyX': {'min': 0}})
542
557
  plt.close('all')
543
558
  del dax
544
559
 
545
- def test16_plot_as_array_3d_ZNonMonot(self):
560
+ def test17_plot_as_array_3d_ZNonMonot(self):
546
561
  dax = self.st.plot_as_array(key='3d', keyZ='y')
547
562
  plt.close('all')
548
563
  del dax
549
564
 
550
- def test17_plot_as_array_4d(self):
565
+ def test18_plot_as_array_4d(self):
551
566
  dax = self.st.plot_as_array(key='4d', dscale={'keyU': 'linear'})
552
567
  plt.close('all')
553
568
  del dax
@@ -557,7 +572,7 @@ class Test02_Manipulate():
557
572
  # plt.close('all')
558
573
  # del dax
559
574
 
560
- def test19_plot_as_profile1d(self):
575
+ def test20_plot_as_profile1d(self):
561
576
  dax = self.st.plot_as_profile1d(
562
577
  key='prof0',
563
578
  key_time='t0',
@@ -591,7 +606,7 @@ class Test02_Manipulate():
591
606
  # File handling
592
607
  # ------------------------
593
608
 
594
- def test21_copy_equal(self):
609
+ def test22_copy_equal(self):
595
610
  st2 = self.st.copy()
596
611
  assert st2 is not self.st
597
612
 
@@ -599,15 +614,15 @@ class Test02_Manipulate():
599
614
  if msg is not True:
600
615
  raise Exception(msg)
601
616
 
602
- def test22_get_nbytes(self):
617
+ def test23_get_nbytes(self):
603
618
  nb, dnb = self.st.get_nbytes()
604
619
 
605
- def test23_save_pfe(self, verb=False):
620
+ def test24_save_pfe(self, verb=False):
606
621
  pfe = os.path.join(_PATH_OUTPUT, 'testsave.npz')
607
622
  self.st.save(pfe=pfe, return_pfe=False)
608
623
  os.remove(pfe)
609
624
 
610
- def test24_saveload(self, verb=False):
625
+ def test25_saveload(self, verb=False):
611
626
  pfe = self.st.save(path=_PATH_OUTPUT, verb=verb, return_pfe=True)
612
627
  st2 = load(pfe, verb=verb)
613
628
  # Just to check the loaded version works fine
@@ -616,7 +631,7 @@ class Test02_Manipulate():
616
631
  raise Exception(msg)
617
632
  os.remove(pfe)
618
633
 
619
- def test25_saveload_coll(self, verb=False):
634
+ def test26_saveload_coll(self, verb=False):
620
635
  pfe = self.st.save(path=_PATH_OUTPUT, verb=verb, return_pfe=True)
621
636
  st = DataStock()
622
637
  st2 = load(pfe, coll=st, verb=verb)
datastock/version.py CHANGED
@@ -1,2 +1,2 @@
1
1
  # Do not edit, pipeline versioning governed by git tags!
2
- __version__ = '0.0.44'
2
+ __version__ = '0.0.46'
datastock-0.0.44.dist-info/METADATA → datastock-0.0.46.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datastock
3
- Version: 0.0.44
3
+ Version: 0.0.46
4
4
  Summary: A python library for generic class and data handling
5
5
  Home-page: https://github.com/ToFuProject/datastock
6
6
  Author: Didier VEZINET
datastock-0.0.44.dist-info/RECORD → datastock-0.0.46.dist-info/RECORD RENAMED
@@ -3,12 +3,13 @@ datastock/_DataCollection_utils.py,sha256=hHf6HvGKMmM-psx3fj9QcY1TEmKrAtTdkRokH7
3
3
  datastock/__init__.py,sha256=i_Ijl-AM07n4zN52frWfbeGN1iB6v4e5oLzTuVIh_oM,217
4
4
  datastock/_class.py,sha256=Az9PS3aSskiPMb1ekt78Y2ynBujYVc_cDjJxW9xH9g4,47
5
5
  datastock/_class0.py,sha256=je4ckTynK8lEGBa7OSURYZZ_-3XjzAtf6SazN3b-f5k,6028
6
- datastock/_class1.py,sha256=WlI666OOG8x-qtfNRcwP7aoCs7TDfvk3yKzVsHShO7c,28745
6
+ datastock/_class1.py,sha256=2PrIT26yRfNrP6YmZMpvb0_b0v_397Y9tOjzeVOxK_I,29313
7
7
  datastock/_class1_binning.py,sha256=LWHv2LIfgZfSFWYwqdcN0DKpNe6q7Go3sxfcJqmzTrI,28085
8
8
  datastock/_class1_check.py,sha256=0azV7ftoAWsqTMEYbGQ_luJi95Px-pBif_vOug3W8Zg,50978
9
- datastock/_class1_compute.py,sha256=yHdG0afYc_YtjpR6RvMh7SeRtWEyuHZ5y9VOPRIYVDo,31671
10
- datastock/_class1_domain.py,sha256=bkuCl29QO7C3RchC8qZyreU90QxmdDYNVYDmzuCLCUY,6252
11
- datastock/_class1_interpolate.py,sha256=3LRKK6aOepJLSnRoRpsS8RAnobqTdE-QZrkdQrnIaqc,37825
9
+ datastock/_class1_color_touch.py,sha256=KoIFCVkJJnGrEChbvDSkgs-KZeRpFyCIA-fE7mrYrLs,6294
10
+ datastock/_class1_compute.py,sha256=yZfj-Fy4wlEyWotWqNwtNtpjF6mak5nMu2ut2OBpzTY,31672
11
+ datastock/_class1_domain.py,sha256=_xUCnwWJX5wPPYrpiGGrRwrPLmiO5BMm3nmYvn_YEOg,6716
12
+ datastock/_class1_interpolate.py,sha256=3VKGMDsiWFQUguMHxMaTQTyMhcN8Ikg1PmaH6TjIeLg,38348
12
13
  datastock/_class1_show.py,sha256=hqd-FeJ1NqiOzbrHzGMrwIo8_lLsjC199Zmw68NqkDQ,11745
13
14
  datastock/_class1_uniformize.py,sha256=dEJime_0SqmW8hX8ooZpHsPI_d8CIE9U9Yz9GhqsEUY,28433
14
15
  datastock/_class2.py,sha256=FG-ZGPVdZEdkRc_3Z9LRzYdRm9Xat7HI06E3-hI5rCk,45422
@@ -31,12 +32,12 @@ datastock/_plot_correlations.py,sha256=ITOypu_AEoKl0ihxocV-JVTXIHqut6p9TfG-xZmQy
31
32
  datastock/_plot_old_backup.py,sha256=XixTi2CiihKjtQP0TRycH0b25caWN1m35DgpsDeiWZE,21729
32
33
  datastock/_plot_text.py,sha256=wQPqjfpLyIioS2JeOt3E9C9HgYUJ49YEoOgRuKYvAR8,3143
33
34
  datastock/_saveload.py,sha256=1vAMp27KfqXbo5b_Pi8hJux0stsHq6dO5vy8k1d4_iA,14141
34
- datastock/version.py,sha256=EX8urNJ1ILJpXuRtvcV0i0MWqPQj1IxD_snvo_plkfg,80
35
+ datastock/version.py,sha256=TTZXyi6fkfPDWOJIxiEa00X66SrcWUHC11j32Bjcyh0,80
35
36
  datastock/tests/__init__.py,sha256=teOo2xP0IO7PQMuMDmum61XVHe2TuxW3BiHiL73X8jQ,35
36
- datastock/tests/test_01_DataStock.py,sha256=tGVJiCVjWT5oR4DlFopbcKMxi2cpCbBvdBDRc0WV1KU,17532
37
+ datastock/tests/test_01_DataStock.py,sha256=Ngz0nbb7Qj3Sr0c4TV2OlK_v4R6aMBrQYXr8BWaYAnw,18048
37
38
  datastock/tests/output/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
38
- datastock-0.0.44.dist-info/LICENSE,sha256=V1uXqi3vxR0QhB4QdFyjkynl6jpN4wZmlB5EMYJk0NM,1068
39
- datastock-0.0.44.dist-info/METADATA,sha256=XTfrvF04pd1W2uJ7lOUhlFWpvL7pQBWxwa7Updlhazc,8660
40
- datastock-0.0.44.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
41
- datastock-0.0.44.dist-info/top_level.txt,sha256=BzJsLLK_zZw13WQCoMhC74qWVKalnVCjBxdPXvJn7HQ,25
42
- datastock-0.0.44.dist-info/RECORD,,
39
+ datastock-0.0.46.dist-info/LICENSE,sha256=V1uXqi3vxR0QhB4QdFyjkynl6jpN4wZmlB5EMYJk0NM,1068
40
+ datastock-0.0.46.dist-info/METADATA,sha256=rs7ozSmlV2xJ4_yOJIwDrDj3brqxOHxqZRlG7W2hLqA,8660
41
+ datastock-0.0.46.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
42
+ datastock-0.0.46.dist-info/top_level.txt,sha256=BzJsLLK_zZw13WQCoMhC74qWVKalnVCjBxdPXvJn7HQ,25
43
+ datastock-0.0.46.dist-info/RECORD,,