datastock 0.0.43__tar.gz → 0.0.45__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. {datastock-0.0.43/datastock.egg-info → datastock-0.0.45}/PKG-INFO +1 -1
  2. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class0.py +4 -2
  3. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1.py +2 -2
  4. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_domain.py +43 -17
  5. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_interpolate.py +47 -37
  6. {datastock-0.0.43 → datastock-0.0.45}/datastock/_saveload.py +27 -6
  7. {datastock-0.0.43 → datastock-0.0.45}/datastock/tests/test_01_DataStock.py +10 -0
  8. {datastock-0.0.43 → datastock-0.0.45}/datastock/version.py +1 -1
  9. {datastock-0.0.43 → datastock-0.0.45/datastock.egg-info}/PKG-INFO +1 -1
  10. {datastock-0.0.43 → datastock-0.0.45}/LICENSE +0 -0
  11. {datastock-0.0.43 → datastock-0.0.45}/MANIFEST.in +0 -0
  12. {datastock-0.0.43 → datastock-0.0.45}/README.md +0 -0
  13. {datastock-0.0.43 → datastock-0.0.45}/_updateversion.py +0 -0
  14. {datastock-0.0.43 → datastock-0.0.45}/datastock/_DataCollection_utils.py +0 -0
  15. {datastock-0.0.43 → datastock-0.0.45}/datastock/__init__.py +0 -0
  16. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class.py +0 -0
  17. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_binning.py +0 -0
  18. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_check.py +0 -0
  19. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_compute.py +0 -0
  20. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_show.py +0 -0
  21. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_uniformize.py +0 -0
  22. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class2.py +0 -0
  23. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class2_interactivity.py +0 -0
  24. {datastock-0.0.43 → datastock-0.0.45}/datastock/_class3.py +0 -0
  25. {datastock-0.0.43 → datastock-0.0.45}/datastock/_direct_calls.py +0 -0
  26. {datastock-0.0.43 → datastock-0.0.45}/datastock/_export_dataframe.py +0 -0
  27. {datastock-0.0.43 → datastock-0.0.45}/datastock/_find_plateau.py +0 -0
  28. {datastock-0.0.43 → datastock-0.0.45}/datastock/_generic_check.py +0 -0
  29. {datastock-0.0.43 → datastock-0.0.45}/datastock/_generic_utils.py +0 -0
  30. {datastock-0.0.43 → datastock-0.0.45}/datastock/_generic_utils_plot.py +0 -0
  31. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_BvsA_as_distribution.py +0 -0
  32. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_BvsA_as_distribution_check.py +0 -0
  33. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_as_array.py +0 -0
  34. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_as_array_1d.py +0 -0
  35. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_as_array_234d.py +0 -0
  36. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_as_mobile_lines.py +0 -0
  37. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_as_profile1d.py +0 -0
  38. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_correlations.py +0 -0
  39. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_old_backup.py +0 -0
  40. {datastock-0.0.43 → datastock-0.0.45}/datastock/_plot_text.py +0 -0
  41. {datastock-0.0.43 → datastock-0.0.45}/datastock/tests/__init__.py +0 -0
  42. {datastock-0.0.43 → datastock-0.0.45}/datastock/tests/output/__init__.py +0 -0
  43. {datastock-0.0.43 → datastock-0.0.45}/datastock.egg-info/SOURCES.txt +0 -0
  44. {datastock-0.0.43 → datastock-0.0.45}/datastock.egg-info/dependency_links.txt +0 -0
  45. {datastock-0.0.43 → datastock-0.0.45}/datastock.egg-info/requires.txt +0 -0
  46. {datastock-0.0.43 → datastock-0.0.45}/datastock.egg-info/top_level.txt +0 -0
  47. {datastock-0.0.43 → datastock-0.0.45}/pyproject.toml +0 -0
  48. {datastock-0.0.43 → datastock-0.0.45}/setup.cfg +0 -0
  49. {datastock-0.0.43 → datastock-0.0.45}/setup.py +0 -0
{datastock-0.0.43/datastock.egg-info → datastock-0.0.45}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datastock
-Version: 0.0.43
+Version: 0.0.45
 Summary: A python library for generic class and data handling
 Home-page: https://github.com/ToFuProject/datastock
 Author: Didier VEZINET
{datastock-0.0.43 → datastock-0.0.45}/datastock/_class0.py
@@ -65,7 +65,7 @@ class DataStock0(object):
         )
 
     @classmethod
-    def from_dict(cls, din=None, isflat=None, sep=None):
+    def from_dict(cls, din=None, isflat=None, sep=None, obj=None):
         """ Populate the instances attributes using an input dict
 
         The input dict must be properly formatted
@@ -85,7 +85,9 @@ class DataStock0(object):
         # ---------------------
         # Instanciate and populate
 
-        obj = cls()
+        if obj is None:
+            obj = cls()
+
         for k0 in din.keys():
             if k0 == '_ddef':
                 if 'dobj' not in din[k0]['params'].keys():
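Note on the from_dict() changes (here and in the DataStock1 override below): the new obj keyword lets from_dict() populate an already-instantiated object instead of always creating a fresh one; when obj is None the previous behaviour is unchanged. A minimal sketch of both call styles, assuming DataStock is importable from the top-level package and din is a properly formatted input dict:

    from datastock import DataStock

    # previous behaviour: a new instance is created internally
    st_new = DataStock.from_dict(din=din)

    # new in 0.0.45: populate an existing (possibly subclass) instance in place
    st = DataStock()
    st_filled = DataStock.from_dict(din=din, obj=st)
    assert st_filled is st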
{datastock-0.0.43 → datastock-0.0.45}/datastock/_class1.py
@@ -369,8 +369,8 @@ class DataStock1(DataStock0):
     ###########
 
     @classmethod
-    def from_dict(cls, din=None, sep=None):
-        obj = super().from_dict(din=din, sep=sep)
+    def from_dict(cls, din=None, sep=None, obj=None):
+        obj = super().from_dict(din=din, sep=sep, obj=obj)
         obj.update()
         return obj
 
{datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_domain.py
@@ -30,11 +30,14 @@ def domain_ref(
     # -----------
     # get indices
 
-    lvectu = sorted({v0['vect'] for v0 in domain.values()})
+    lvectu = sorted({
+        v0['vect'] for v0 in domain.values()
+        if v0.get('vect') is not None
+    })
 
     for vv in lvectu:
 
-        lk0 = [k0 for k0, v0 in domain.items() if v0['vect'] == vv]
+        lk0 = [k0 for k0, v0 in domain.items() if v0.get('vect') == vv]
         for k0 in lk0:
 
             if domain[k0].get('domain') is None:
@@ -61,12 +64,14 @@ def _check(
 
     # ---------
     # prepare
+    # ---------
 
     ldata = list(coll.ddata.keys())
     lref = list(coll.dref.keys())
 
     # ------------
     # domain
+    # ------------
 
     c0 = (
         isinstance(domain, dict)
@@ -80,24 +85,17 @@ def _check(
         )
         raise Exception(msg)
 
-    # ------------
+    # --------------
     # check each key
+    # --------------
 
     dfail = {}
     domain = copy.deepcopy(domain)
     for k0, v0 in domain.items():
 
-        # check ref vector
-        kwd = {'ref': k0} if k0 in lref else {'key0': k0}
-        hasref, hasvect, ref, vect = coll.get_ref_vector(**kwd)[:4]
-        if not (hasref and ref is not None):
-            dfail[k0] = "No associated ref identified!"
-            continue
-        if not (hasvect and vect is not None):
-            dfail[k0] = "No associated ref vector identified!"
-            continue
-
+        # -----------
         # v0 is dict
+
         ltyp = (list, tuple, np.ndarray)
         if isinstance(v0, ltyp):
             domain[k0] = {'domain': v0}
@@ -106,21 +104,42 @@ def _check(
 
         c0 = (
             isinstance(domain[k0], dict)
-            and any(ss in ['ind', 'domain'] for ss in domain[k0].keys())
+            and any([ss in ['ind', 'domain'] for ss in domain[k0].keys()])
             and (
                 isinstance(domain[k0].get('domain'), ltyp)
                 or np.isscalar(domain[k0].get('domain', 0))
            )
-            and isinstance(domain[k0].get('ind', np.r_[0]), np.ndarray)
+            and isinstance(domain[k0].get('ind', np.r_[0]), (np.ndarray, int))
         )
+
         if not c0:
             dfail[k0] = "must be a dict with keys ['ind', 'domain']"
             continue
 
+        # ----------------
+        # check ref vector
+
+        kwd = {'ref': k0} if k0 in lref else {'key0': k0}
+        hasref, hasvect, ref, vect = coll.get_ref_vector(**kwd)[:4]
+
+        if not (hasref and ref is not None):
+            dfail[k0] = "No associated ref identified!"
+            continue
+
         # vect
-        domain[k0]['vect'] = vect
+        domain[k0]['ref'] = ref
 
+        if domain[k0].get('domain') is not None:
+            if not (hasvect and vect is not None):
+                dfail[k0] = "No associated ref vector identified!"
+                continue
+
+            # vect
+            domain[k0]['vect'] = vect
+
+        # -------
         # domain
+
         dom = domain[k0].get('domain')
         if dom is not None:
@@ -129,10 +148,15 @@ def _check(
                 continue
             domain[k0]['domain'] = dom
 
+        # -----
         # ind
+
         ind = domain[k0].get('ind')
         if ind is not None:
-            vsize = coll.ddata[vect]['data'].size
+            if np.isscalar(ind):
+                ind = np.array([ind], dtype=int)
+
+            vsize = coll.dref[ref]['size']
             if ind.dtype == bool:
                 pass
             elif 'int' in ind.dtype.name:
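Note: as the checks above show, each value of the domain dict may be given either directly as a list / tuple / array (shorthand for {'domain': ...}) or as a dict with 'ind' and/or 'domain' keys; 'ind' may now also be a plain int (converted to a one-element integer array), and a ref vector is only looked up when a 'domain' value is provided. A hedged sketch of the accepted forms (key names are placeholders, and the exact content of a 'domain' value is validated by _check_domain(), which is not part of this diff):

    import numpy as np

    domain = {
        'key_x': [1.0, 2.5],               # shorthand for {'domain': [1.0, 2.5]}
        'nt': {'ind': 2},                  # scalar index -> np.array([2], dtype=int)
        # 'nt': {'ind': np.arange(5) < 3}, # bool or int index arrays also accepted
    }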
@@ -151,12 +175,14 @@ def _check(
 
     # -----------
     # errors
+    # -----------
 
     if len(dfail) > 0:
         lstr = [f"\t- '{k0}': {v0}" for k0, v0 in dfail.items()]
         msg = (
             "The following domain keys / values are not conform:\n"
             + "\n".join(lstr)
+            + f"\nProvided:\n{domain}"
         )
         raise Exception(msg)
 
{datastock-0.0.43 → datastock-0.0.45}/datastock/_class1_interpolate.py
@@ -88,6 +88,7 @@ def interpolate(
         ddata, dout, dsh_other, sli_c, sli_x, sli_v,
         log_log, nan0, grid, ndim, xunique,
         returnas, return_params, store, inplace,
+        domain,
     ) = _check(
         coll=coll,
         # interpolation base
@@ -143,7 +144,7 @@ def interpolate(
     # adjust data and ref if xunique
 
     if xunique:
-        _xunique(dout)
+        _xunique(dout, domain=domain)
 
     # --------
     # store
@@ -392,9 +393,9 @@ def _check(
     )
 
     # ---------------------
-    # get dvect from domain
+    # get dref_dom from domain
 
-    domain, dvect = _get_dvect(
+    domain, dref_dom = _get_drefdom(
         coll=coll,
         domain=domain,
         ref_key=ref_key,
@@ -407,7 +408,7 @@ def _check(
         coll=coll,
         keys=keys,
         ref_key=ref_key,
-        dvect=dvect,
+        dref_dom=dref_dom,
     )
 
     # --------
@@ -422,7 +423,7 @@ def _check(
     )
 
     if ref_com is not None and domain is not None:
-        if ref_com in [coll.ddata[k0]['ref'][0] for k0 in dvect.keys()]:
+        if ref_com in list(dref_dom.keys()):
             msg = (
                 "Arg ref_com and domain cannot be applied to the same ref!\n"
                 f"\t- ref_com: {ref_com}\n"
@@ -440,8 +441,10 @@ def _check(
         x0=x0,
         daxis=daxis,
         dunits=dunits,
+        # ref com
         dref_com=dref_com,
-        dvect=dvect,
+        # domain
+        dref_dom=dref_dom,
     )
 
     # --------------
@@ -488,6 +491,7 @@ def _check(
         ddata, dout, dsh_other, sli_c, sli_x, sli_v,
         log_log, nan0, grid, ndim, xunique,
         returnas, return_params, store, inplace,
+        domain,
     )
 
 
@@ -963,56 +967,59 @@ def _x01_grid(
     return x0, x1, refx, ix, xunique
 
 
-def _get_dvect(
+def _get_drefdom(
     coll=None,
     domain=None,
     ref_key=None,
 ):
     # ----------------
-    # domain => dvect
+    # domain => dref_dom
+
+    lr_ref_key = [coll.ddata[kk]['ref'][0] for kk in ref_key]
 
     if domain is not None:
 
         # get domain
         domain = coll.get_domain_ref(domain)
 
-        # derive dvect
-        lvectu = sorted({
-            v0['vect'] for v0 in domain.values() if v0['vect'] not in ref_key
+        # derive lrefu
+        lrefu = sorted({
+            v0['ref'] for v0 in domain.values() if v0['ref'] not in lr_ref_key
         })
 
-        dvect = {
-            k0: [k1 for k1, v1 in domain.items() if v1['vect'] == k0]
-            for k0 in lvectu
+        # derive dref_dom
+        dref_dom = {
+            rr: [k1 for k1, v1 in domain.items() if v1['ref'] == rr]
+            for rr in lrefu
         }
 
         # check unicity of vect
-        dfail = {k0: v0 for k0, v0 in dvect.items() if len(v0) > 1}
+        dfail = {k0: v0 for k0, v0 in dref_dom.items() if len(v0) > 1}
         if len(dfail) > 0:
             lstr = [f"\t- '{k0}': {v0}" for k0, v0 in dfail.items()]
             msg = (
-                "Some ref vector have been specified with multiple domains!\n"
+                "Some ref have been specified with multiple domains!\n"
                 + "\n".join(lstr)
             )
             raise Exception(msg)
 
-        # build final dvect
-        dvect = {
+        # build final dref_dom
+        dref_dom = {
             k0: domain[v0[0]]['ind']
-            for k0, v0 in dvect.items()
+            for k0, v0 in dref_dom.items()
         }
 
     else:
-        dvect = None
+        dref_dom = None
 
-    return domain, dvect
+    return domain, dref_dom
 
 
 def _get_ddata(
     coll=None,
     keys=None,
     ref_key=None,
-    dvect=None,
+    dref_dom=None,
 ):
 
     # --------
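Note: the renamed helper now keys the domain selection on the ref itself (v0['ref'], set earlier in _check) rather than on its reference vector, and excludes refs already used as the interpolation base. A standalone sketch of the mapping built above, on a made-up domain dict (in the package, domain comes from coll.get_domain_ref()):

    import numpy as np

    domain = {
        'key_t': {'ref': 'nt', 'ind': np.r_[0, 2]},
        'key_x': {'ref': 'nx', 'ind': np.arange(10) < 3},
    }
    lr_ref_key = ['nx']     # refs of the interpolation base -> excluded

    lrefu = sorted({v0['ref'] for v0 in domain.values() if v0['ref'] not in lr_ref_key})
    dref_dom = {rr: [k1 for k1, v1 in domain.items() if v1['ref'] == rr] for rr in lrefu}
    dref_dom = {k0: domain[v0[0]]['ind'] for k0, v0 in dref_dom.items()}
    # -> {'nt': array([0, 2])}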
@@ -1024,13 +1031,12 @@ def _get_ddata(
 
         data = coll.ddata[k0]['data']
 
         # apply domain
-        if dvect is not None:
-            for k1, v1 in dvect.items():
-                ref = coll.ddata[k1]['ref'][0]
-                if ref in coll.ddata[k0]['ref']:
-                    ax = coll.ddata[k0]['ref'].index(ref)
+        if dref_dom is not None:
+            for rr, vr in dref_dom.items():
+                if rr in coll.ddata[k0]['ref']:
+                    ax = coll.ddata[k0]['ref'].index(rr)
                     sli = tuple([
-                        v1 if ii == ax else slice(None)
+                        vr if ii == ax else slice(None)
                         for ii in range(data.ndim)
                     ])
                     data = data[sli]
@@ -1050,7 +1056,7 @@ def _get_dout(
     # common refs
     dref_com=None,
     # domain
-    dvect=None,
+    dref_dom=None,
 ):
 
     # -------------
@@ -1069,11 +1075,11 @@ def _get_dout(
         rd = list(coll.ddata[k0]['ref'])
 
         # apply domain
-        if dvect is not None:
-            for k1, v1 in dvect.items():
-                if coll.ddata[k1]['ref'][0] in rd:
-                    ax = rd.index(coll.ddata[k1]['ref'][0])
-                    sh[ax] = len(v1) if v1.dtype == int else v1.sum()
+        if dref_dom is not None:
+            for rr, vr in dref_dom.items():
+                if rr in rd:
+                    ax = rd.index(rr)
+                    sh[ax] = len(vr) if vr.dtype == int else vr.sum()
                     rd[ax] = None
 
         # ------------------------
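Note: downstream, each dref_dom entry is applied along the matching axis of every data array that carries that ref, and the output shape is reduced accordingly (len() for an integer index array, .sum() for a boolean mask). A small numpy sketch mirroring the slicing in _get_ddata() and the shape bookkeeping in _get_dout() (names are made up):

    import numpy as np

    data = np.arange(24).reshape(4, 6)   # refs ('nt', 'nx')
    ref = ('nt', 'nx')
    dref_dom = {'nt': np.r_[0, 2]}       # could also be a bool mask of length 4

    for rr, vr in dref_dom.items():
        if rr in ref:
            ax = ref.index(rr)
            sli = tuple(vr if ii == ax else slice(None) for ii in range(data.ndim))
            data = data[sli]
            # new length of that axis: len(vr) for int indices, vr.sum() for a bool mask

    print(data.shape)   # (2, 6)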
@@ -1556,7 +1562,7 @@ def _interp2d(
 # ###############################################################
 
 
-def _xunique(dout=None):
+def _xunique(dout=None, domain=None):
     """ interpolation on a single point => eliminates a ref """
 
     # ----------
@@ -1567,13 +1573,17 @@ def _xunique(dout=None):
         for k0, v0 in dout.items()
     }
 
-    dwrong = {k0: v0 for k0, v0 in dind.items() if len(v0) != 1}
+    # Number of Nones expected
+    nNone = 1 + len(domain)
+
+    # check
+    dwrong = {k0: v0 for k0, v0 in dind.items() if len(v0) != nNone}
     if len(dwrong) > 0:
         lstr = [
             f"\t- {k0}: {dout[k0]['ref']} => {v0}" for k0, v0 in dwrong.items()
         ]
         msg = (
-            "Interpolation at unique point => ref should have one None:\n"
+            "Interpolate unique pt => ref should have nNone = 1 + {len(domain)}:\n"
             + "\n".join(lstr)
         )
         raise Exception(msg)
{datastock-0.0.43 → datastock-0.0.45}/datastock/_saveload.py
@@ -199,6 +199,7 @@ def save(
 def load(
     pfe=None,
     cls=None,
+    coll=None,
     allow_pickle=None,
     sep=None,
     verb=None,
@@ -206,16 +207,24 @@ def load(
 
     # -------------
     # check inputs
+    # -------------
 
+    # ---------
     # pfe
+
     if not os.path.isfile(pfe):
         msg = f"Arg pfe must be a valid path to a file!\n\t- Provided: {pfe}"
         raise Exception(msg)
 
-    # cls
-    if cls is None:
-        from ._class import DataStock
-        cls = DataStock
+    # --------------
+    # cls vs coll
+
+    if coll is None:
+        if cls is None:
+            from ._class import DataStock
+            cls = DataStock
+    else:
+        cls = coll.__class__
 
     if not (type(cls) is type and hasattr(cls, 'from_dict')):
         msg = (
@@ -224,14 +233,18 @@ def load(
         )
         raise Exception(msg)
 
+    # ------------
     # allow_pickle
+
     allow_pickle = _generic_check._check_var(
         allow_pickle, 'allow_pickle',
         default=True,
         types=bool,
     )
 
+    # -------
     # verb
+
     verb = _generic_check._check_var(
         verb, 'verb',
         default=True,
@@ -240,11 +253,13 @@ def load(
 
     # --------------
     # load flat dict
+    # --------------
 
     dflat = dict(np.load(pfe, allow_pickle=allow_pickle))
 
     # ------------------------------
     # load sep from file if exists
+    # ------------------------------
 
     if _KEY_SEP in dflat.keys():
         # new
@@ -256,6 +271,7 @@ def load(
 
     # ----------
     # reshape
+    # ----------
 
     dout = {}
     for k0, v0 in dflat.items():
@@ -310,14 +326,19 @@ def load(
 
     # -----------
     # Instanciate
+    # -----------
 
-    obj = cls.from_dict(dout)
+    coll = cls.from_dict(dout, obj=coll)
+
+    # -----------
+    # verb
+    # -----------
 
     if verb:
         msg = f"Loaded from\n\t{pfe}"
         print(msg)
 
-    return obj
+    return coll
 
 
 # #################################################################
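Note: load() can now be handed an existing instance via the new coll argument, in which case the class is taken from coll.__class__ and the file content is injected into that instance through from_dict(obj=coll); otherwise the previous cls logic (defaulting to DataStock) applies. A hedged sketch of both call styles, assuming DataStock and load are importable from the top-level package as in the test below, and that 'saved.npz' is a placeholder for a file previously produced by save():

    from datastock import DataStock, load

    # previous behaviour: a new instance of cls (default DataStock) is created
    st = load('saved.npz')

    # new in 0.0.45: populate an already existing instance (its class is reused)
    st0 = DataStock()
    st1 = load('saved.npz', coll=st0)
    assert st1 is st0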
{datastock-0.0.43 → datastock-0.0.45}/datastock/tests/test_01_DataStock.py
@@ -612,6 +612,16 @@ class Test02_Manipulate():
         st2 = load(pfe, verb=verb)
         # Just to check the loaded version works fine
         msg = st2.__eq__(self.st, returnas=str)
+        if msg is not True:
+            raise Exception(msg)
+        os.remove(pfe)
+
+    def test25_saveload_coll(self, verb=False):
+        pfe = self.st.save(path=_PATH_OUTPUT, verb=verb, return_pfe=True)
+        st = DataStock()
+        st2 = load(pfe, coll=st, verb=verb)
+        # Just to check the loaded version works fine
+        msg = st2.__eq__(self.st, returnas=str)
         if msg is not True:
             raise Exception(msg)
         os.remove(pfe)
{datastock-0.0.43 → datastock-0.0.45}/datastock/version.py
@@ -1,2 +1,2 @@
 # Do not edit, pipeline versioning governed by git tags!
-__version__ = '0.0.43'
+__version__ = '0.0.45'
{datastock-0.0.43 → datastock-0.0.45/datastock.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datastock
-Version: 0.0.43
+Version: 0.0.45
 Summary: A python library for generic class and data handling
 Home-page: https://github.com/ToFuProject/datastock
 Author: Didier VEZINET