datastock-0.0.35-py3-none-any.whl → datastock-0.0.37-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
@@ -1,9 +1,7 @@
  # -*- coding: utf-8 -*-

  # Builtin
- import itertools as itt
  import copy
- import warnings


  # Common
@@ -13,7 +11,6 @@ import numpy as np
  # import scipy.linalg as scplin
  import scipy.stats as scpstats
  import scipy.spatial as scpspace
- import datastock as ds


  from . import _generic_check
@@ -31,9 +28,19 @@ _LCROSS_OK = ['spearman', 'pearson', 'distance']
  def _get_slice(laxis=None, ndim=None):

      nax = len(laxis)
-     assert nax in range(1, ndim + 1)
+     if not (nax >= 0 and nax <= ndim):
+         msg = (
+             "Something wrong with the number of axis provided:\n"
+             f"\t- laxis: {laxis}\n"
+             f"\t- ndim: {ndim}\n"
+         )
+         raise Exception(msg)

-     if ndim == nax:
+     if nax == 0:
+         def fslice(*args):
+             return tuple([slice(None) for ii in laxis])
+
+     elif ndim == nax:
          def fslice(*args):
              return args

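
The new `nax == 0` branch above gives `_get_slice` a defined behaviour when no axis is selected: the returned slicer yields an empty index tuple, and indexing a NumPy array with `()` simply returns the whole array instead of tripping the old assertion. A minimal sketch of that behaviour (illustrative only, not the package's code):

```python
import numpy as np

laxis = []                                   # no axes selected

def fslice(*args):
    # with laxis empty this is always the empty tuple ()
    return tuple(slice(None) for _ in laxis)

arr = np.arange(12).reshape(3, 4)
print(arr[fslice()].shape)                   # (3, 4): arr[()] is the full array
```
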
@@ -206,7 +213,7 @@ def propagate_indices_per_ref(
          ]
          msg = (
              "Provided ldata are not suitable:\n"
-             + "\n".join(lstr)
+             + "\n".join(lstr)
          )
          raise Exception(msg)

@@ -233,7 +240,6 @@ def propagate_indices_per_ref(
          raise Exception(msg)
      ref_data = ref_data[0]

-
      # For each ref in lref, get list of matching data
      drdata = {
          rr: [
@@ -705,16 +711,54 @@ def correlations(

  def _extract_instance(
      coll=None,
-     lref=None,
-     ldata=None,
+     keys=None,
+     # optional include
+     inc_monot=None,
+     inc_vectors=None,
+     inc_allrefs=None,
+     # output
      coll2=None,
+     inplace=None,
+     return_keys=None,
  ):

+     # --------------------
+     # check inputs
+     # --------------------
+
+     (
+         keys,
+         inc_monot, inc_vectors, inc_allrefs,
+         inplace, return_keys,
+     ) = _extract_check(**locals())
+
+     # -----------------
+     # select
+     # -----------------
+
+     ldata, lref = _extract_select(
+         coll=coll,
+         keys=keys,
+         # optional includes
+         inc_monot=inc_monot,
+         inc_vectors=inc_vectors,
+         inc_allrefs=inc_allrefs,
+     )
+
      # -------------------
      # Instanciate
+     # -------------------

      if coll2 is None:
-         coll2 = coll.__class__()
+         if inplace is True:
+             coll2 = coll
+         else:
+             coll2 = coll.__class__()
+
+
+     # ------------------------
+     # Populate
+     # ------------------------

      # -------------------
      # Populate with ref
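
The new `inplace` flag controls whether `_extract_instance` populates the caller's own collection or a fresh instance of the same class. A toy illustration of that branching, using a hypothetical stand-in class rather than an actual DataStock collection:

```python
class Toy:
    """Hypothetical stand-in for a DataStock-like collection."""
    pass

def _target(coll, coll2=None, inplace=False):
    # same branching as in the diff: reuse coll when inplace, else a new instance
    if coll2 is None:
        coll2 = coll if inplace else coll.__class__()
    return coll2

coll = Toy()
assert _target(coll, inplace=True) is coll        # extracted data lands in coll itself
assert _target(coll, inplace=False) is not coll   # extracted data lands in a fresh Toy
```
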
@@ -744,57 +788,173 @@ def _extract_instance(
              **copy.deepcopy({pp: coll._ddata[k0][pp] for pp in lpar}),
          )

-     return coll2
-
+     # -------------
+     # return
+     # -------------

- def _extract_dataref(coll=None, keys=None, vectors=None):
+     if return_keys is True:
+         return coll2, keys
+     else:
+         return coll2

-     # ----------------
-     # check inputs

-     # vectors
-     vectors = ds._generic_check._check_var(
-         vectors, 'vectors',
-         types=bool,
-         default=True,
-     )
+ def _extract_check(
+     coll=None,
+     keys=None,
+     # optional include
+     inc_monot=None,
+     inc_vectors=None,
+     inc_allrefs=None,
+     vectors=None,
+     # output
+     coll2=None,
+     inplace=None,
+     return_keys=None,
+ ):

+     # --------------
      # keys
-     if keys is None:
-         return
+     # --------------
+
      if isinstance(keys, str):
          keys = [keys]

      lokd = list(coll._ddata.keys())
      lokr = list(coll._dref.keys())
-     keys = _generic_check._check_var_iter(
+     keys = list(set(_generic_check._check_var_iter(
          keys, 'keys',
+         default=None,
          types=list,
-         allowed=lokr + lokd,
+         types_iter=str,
+         allowed=lokd+lokr,
+     )))
+
+     # -----------------
+     # optional includes
+     # -----------------
+
+     # monotonous vectors
+     inc_monot = _generic_check._check_var(
+         inc_monot, 'inc_monot',
+         types=bool,
+         default=True,
+     )
+
+     # any vectors
+     inc_vectors = _generic_check._check_var(
+         inc_vectors, 'inc_vectors',
+         types=bool,
+         default=False,
      )

-     # -----------------------------
-     # Get corresponding list of ref
+     # any nd array
+     inc_allrefs = _generic_check._check_var(
+         inc_allrefs, 'inc_allrefs',
+         types=bool,
+         default=False,
+     )

-     lref = set(
-         [k0 for k0 in keys if k0 in lokr]
-         + [
-             k0 for k0, v0 in coll._dref.items()
-             if any([ss in keys for ss in v0['ldata']])
-         ]
+     # -----------------
+     # output
+     # -----------------
+
+     # return_keys
+     return_keys = _generic_check._check_var(
+         return_keys, 'return_keys',
+         types=bool,
+         default=False,
+     )
+
+     # inplace
+     inplace = _generic_check._check_var(
+         inplace, 'inplace',
+         types=bool,
+         default=False,
      )

-     # add vectors
-     if vectors is True:
-         keys = set(
-             [k0 for k0 in keys if k0 in lokd]
-             + list(itt.chain.from_iterable([
-                 coll.dref[k0]['ldata_monot']
-                 for k0 in lref
-             ]))
+     # coll2
+     if coll2 is not None:
+         c0 = (
+             issubclass(coll2.__class__, coll.__class__)
+             or issubclass(coll.__class__, coll2.__class__)
          )
+         if not c0:
+             msg = "Arg coll2 must be a DataStock subclass instance"
+             raise Exception(msg)
+
+     return (
+         keys,
+         inc_monot, inc_vectors, inc_allrefs,
+         inplace, return_keys,
+     )
+
+
+ def _extract_select(
+     coll=None,
+     keys=None,
+     # optional include
+     inc_monot=None,
+     inc_vectors=None,
+     inc_allrefs=None,
+ ):
+
+     # ----------------------
+     # get all relevant refs
+
+     ldata0 = [k0 for k0 in keys if k0 in coll.ddata.keys()]
+     lref = list(set(
+         [k0 for k0 in keys if k0 in coll.dref.keys()]
+         + [
+             rr for rr in coll.dref.keys()
+             if any([rr in coll.ddata[k0]['ref'] for k0 in ldata0])
+         ]
+     ))
+
+     # ------------------------
+     # loop on refs for vectors
+
+     any_inc = any([inc_monot, inc_vectors, inc_allrefs])
+     if any_inc:
+         ldata = []
+
+     if inc_monot is True or inc_vectors is True:
+
+         for rr in lref:
+             for k0, v0 in coll.ddata.items():
+
+                 c0 = (
+                     v0['ref'] == (rr,)
+                     and (
+                         inc_vectors is True
+                         or (
+                             inc_monot is True
+                             and v0['monot'] == (True,)
+                         )
+                     )
+                 )
+
+                 if c0:
+                     ldata.append(k0)
+
+     # ------------------------
+     # loop on data0 for ndarrays
+
+     if inc_allrefs is True:
+
+         for dd in ldata0:
+             ref = coll.ddata[dd]['ref']
+
+             for k0, v0 in coll.ddata.items():
+                 if v0['ref'] == ref:
+                     ldata.append(k0)
+
+     # ----------------------
+     # return
+
+     if any_inc:
+         ldata0 = list(set(ldata0 + ldata))

-     return lref, keys
+     return ldata0, lref


  #############################################
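
Taken together, `_extract_check` normalises the keyword arguments and `_extract_select` widens the requested keys: for every ref touched by the request it can pull in the 1-D vectors carried by that ref (all of them with `inc_vectors=True`, only monotonic ones with `inc_monot=True`), and with `inc_allrefs=True` it also pulls in every array sharing the full `ref` tuple of a requested key. A self-contained sketch of the vector-matching rule on plain dicts (the dict layout below only mimics the `'ref'`/`'monot'` fields of `coll.ddata`; it is not the package's data structure):

```python
# toy ddata mimicking the 'ref' / 'monot' fields used by the selection rule
ddata = {
    't':     {'ref': ('nt',),      'monot': (True,)},
    'noise': {'ref': ('nt',),      'monot': (False,)},
    'prof':  {'ref': ('nt', 'nx'), 'monot': (False, False)},
}

def select_vectors(lref, inc_monot=True, inc_vectors=False):
    # simplified version of the c0 condition in the diff: keep 1d arrays whose
    # single ref is in lref, either all of them (inc_vectors) or only the
    # monotonic ones (inc_monot)
    return [
        k0 for rr in lref for k0, v0 in ddata.items()
        if v0['ref'] == (rr,)
        and (inc_vectors or (inc_monot and v0['monot'] == (True,)))
    ]

print(select_vectors(['nt']))                     # ['t']
print(select_vectors(['nt'], inc_vectors=True))   # ['t', 'noise']
```
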
@@ -1073,4 +1233,4 @@ def _extract_dataref(coll=None, keys=None, vectors=None):
  # lkey=[idq2dR],
  # return_all=True,
  # )
- # return out
+ # return out
@@ -88,7 +88,7 @@ def _check(
      for k0, v0 in domain.items():

          # check ref vector
-         kwd = {'ref': k0} if k0 in lref else {'key': k0}
+         kwd = {'ref': k0} if k0 in lref else {'key0': k0}
          hasref, hasvect, ref, vect = coll.get_ref_vector(**kwd)[:4]
          if not (hasref and ref is not None):
              dfail[k0] = "No associated ref identified!"
@@ -244,4 +244,4 @@ def _set_ind_from_domain(

      ind = ind_in & (~ind_out)

-     return ind
+     return ind
@@ -241,7 +241,7 @@ def _check_keys(
          if rr in lref:
              kwd = {'ref': rr}
          else:
-             kwd = {'key': rr}
+             kwd = {'key0': rr}
          hasref, hasvect, ref, ref_key[ii] = coll.get_ref_vector(
              **kwd,
          )[:4]
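
This hunk and the earlier `_check` hunk make the same one-word change: the keyword forwarded to `coll.get_ref_vector()` for a non-ref key is now `key0` instead of `key`, presumably tracking a matching parameter rename inside the package (the renamed signature itself is not part of this diff). The caller-side dispatch is otherwise unchanged; a sketch of the pattern with a hypothetical stand-in (only the keyword names `ref`/`key0` come from the diff):

```python
def get_ref_vector(ref=None, key0=None):
    # hypothetical body; the real method resolves a reference vector in the collection
    return (ref is not None, key0 is not None, ref, key0)

lref = ['nt', 'nx']                               # toy list of known ref names
for rr in ('nt', 'profile1d'):
    kwd = {'ref': rr} if rr in lref else {'key0': rr}
    hasref, hasvect, ref, key = get_ref_vector(**kwd)
```
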
@@ -1550,7 +1550,7 @@ def _xunique(dout=None):

      # ----------
      # safety check
-
+
      dind = {
          k0: [jj for jj, rr in enumerate(v0['ref']) if rr is None]
          for k0, v0 in dout.items()
@@ -1566,10 +1566,10 @@ def _xunique(dout=None):
              + "\n".join(lstr)
          )
          raise Exception(msg)
-
+
      # --------------
      # ajusting dout
-
+
      for k0, v0 in dout.items():

          i0 = dind[k0][0]
@@ -1609,12 +1609,12 @@ def _store(

      # -------------
      # store_keys
-
+
      if store_keys is None:
          store_keys = [f"{k0}_interp" for k0 in dout.keys()]
      if isinstance(store_keys, str):
          store_keys = [store_keys]
-
+
      lout = list(coll.ddata.keys())
      store_keys = _generic_check._check_var_iter(
          store_keys, 'store_keys',
@@ -1622,7 +1622,7 @@ def _store(
          types_iter=str,
          excluded=lout,
      )
-
+
      assert len(store_keys) == len(dout)

      # ---------
@@ -1637,4 +1637,4 @@ def _store(
              units=v0['units'],
          )

-     return coll2
+     return coll2