relib 1.2.0__tar.gz → 1.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,4 @@
1
- Copyright 2023 Hampus Hallman
1
+ Copyright 2018-2025 Hampus Hallman
2
2
 
3
3
  Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4
4
 
@@ -1,13 +1,14 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: relib
3
- Version: 1.2.0
3
+ Version: 1.2.2
4
4
  Project-URL: Repository, https://github.com/Reddan/relib.git
5
5
  Author: Hampus Hallman
6
- License: Copyright 2023 Hampus Hallman
6
+ License: Copyright 2018-2025 Hampus Hallman
7
7
 
8
8
  Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
9
9
 
10
10
  The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
11
11
 
12
12
  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
13
+ License-File: LICENSE
13
14
  Requires-Python: >=3.12
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "relib"
3
- version = "1.2.0"
3
+ version = "1.2.2"
4
4
  requires-python = ">=3.12"
5
5
  dependencies = []
6
6
  authors = [
@@ -13,6 +13,12 @@ license = {file = "LICENSE"}
13
13
  [project.urls]
14
14
  Repository = "https://github.com/Reddan/relib.git"
15
15
 
16
+ [dependency-groups]
17
+ dev = [
18
+ "numpy>=2.1.3",
19
+ "pandas>=2.2.3",
20
+ ]
21
+
16
22
  [build-system]
17
23
  requires = ["hatchling"]
18
24
  build-backend = "hatchling.build"
@@ -1,6 +1,6 @@
1
1
  from .utils import (
2
- clear_console,
3
2
  non_none,
3
+ as_any,
4
4
  list_split,
5
5
  drop_none,
6
6
  distinct,
@@ -25,11 +25,13 @@ from .utils import (
25
25
  group,
26
26
  reversed_enumerate,
27
27
  get_at,
28
+ for_each,
28
29
  sized_partitions,
29
30
  num_partitions,
30
31
  df_from_array,
31
32
  StrFilter,
32
33
  str_filterer,
33
34
  )
34
- from .hashing import hash
35
+ from .system import read_json, write_json, clear_console, console_link
36
+ from .hashing import hash, hash_obj
35
37
  from .measure_duration import measure_duration
@@ -0,0 +1,179 @@
1
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.

import pickle
import hashlib
import sys
import types
import io
import decimal

# Detect numpy availability once at import time; hash_obj picks the
# NumpyHasher only when numpy is actually importable.
# Fix: the original used a bare `except:`, which would also swallow
# KeyboardInterrupt/SystemExit raised during import.
try:
  import numpy
except ImportError:
  has_numpy = False
else:
  has_numpy = True

# Use the pure-Python pickler so its internals (dispatch table, memoize,
# _batch_setitems) can be overridden by the Hasher subclass below.
Pickler = pickle._Pickler
20
+
21
+
22
class _ConsistentSet(object):
  """Set wrapper whose pickled representation is independent of iteration order."""

  def __init__(self, set_sequence):
    try:
      ordered = sorted(set_sequence)
    except (TypeError, decimal.InvalidOperation):
      # Elements are not mutually orderable: order by their content hash instead.
      ordered = sorted(hash_obj(item) for item in set_sequence)
    self._sequence = ordered
28
+
29
+
30
class _MyHash(object):
  """ Class used to hash objects that won't normally pickle """

  def __init__(self, *args):
    # The components that uniquely identify the unpicklable object
    # (e.g. method name, owning instance, class); pickled in its place.
    self.args = args
35
+
36
+
37
class Hasher(Pickler):
  """ A subclass of pickler, to do cryptographic hashing, rather than pickling. """

  def __init__(self, hash_name="md5"):
    # hash_name: any algorithm name accepted by hashlib.new (e.g. "md5", "sha1").
    self.stream = io.BytesIO()
    # We want a pickle protocol that only changes with major Python versions
    protocol = pickle.HIGHEST_PROTOCOL
    Pickler.__init__(self, self.stream, protocol=protocol)
    self._hash = hashlib.new(hash_name)

  def hash(self, obj) -> str:
    """Pickle *obj* into the internal stream and return the hex digest of the bytes."""
    try:
      self.dump(obj)
    except pickle.PicklingError as e:
      # Attach context about which object failed before re-raising.
      e.args += ("PicklingError while hashing %r: %r" % (obj, e),)
      raise
    dumps = self.stream.getvalue()
    self._hash.update(dumps)
    return self._hash.hexdigest()

  def save(self, obj):
    # Intercept bound methods and builtin methods, which Pickler cannot handle.
    if isinstance(obj, (types.MethodType, type({}.pop))):
      # the Pickler cannot pickle instance methods; here we decompose
      # them into components that make them uniquely identifiable
      if hasattr(obj, "__func__"):
        func_name = obj.__func__.__name__
      else:
        func_name = obj.__name__
      inst = obj.__self__
      if type(inst) == type(pickle):
        # Method bound to a module: identify by (method name, module name).
        obj = _MyHash(func_name, inst.__name__)
      elif inst is None:
        # type(None) or type(module) do not pickle
        obj = _MyHash(func_name, inst)
      else:
        cls = obj.__self__.__class__
        obj = _MyHash(func_name, inst, cls)
    Pickler.save(self, obj)

  def memoize(self, obj):
    # We want hashing to be sensitive to value instead of reference.
    # For example we want ["aa", "aa"] and ["aa", "aaZ"[:2]]
    # to hash to the same value and that's why we disable memoization
    # for strings
    if isinstance(obj, (bytes, str)):
      return
    Pickler.memoize(self, obj)

  # The dispatch table of the pickler is not accessible in Python
  # 3, as these lines are only bugware for IPython, we skip them.
  def save_global(self, obj, name=None):
    # We have to override this method in order to deal with objects
    # defined interactively in IPython that are not injected in
    # __main__
    try:
      Pickler.save_global(self, obj, name=name)
    except pickle.PicklingError:
      Pickler.save_global(self, obj, name=name)
      module = getattr(obj, "__module__", None)
      if module == "__main__":
        my_name = name
        if my_name is None:
          my_name = obj.__name__
        mod = sys.modules[module]
        if not hasattr(mod, my_name):
          # IPython doesn't inject the variables define
          # interactively in __main__
          setattr(mod, my_name, obj)

  def _batch_setitems(self, items):
    # Sort dict items so that insertion order does not affect the hash;
    # fall back to sorting by each key's content hash when keys are not
    # mutually orderable.
    try:
      Pickler._batch_setitems(self, iter(sorted(items)))
    except TypeError:
      Pickler._batch_setitems(self, iter(sorted((hash_obj(k), v) for k, v in items)))

  def save_set(self, set_items):
    # Hash sets through an order-independent wrapper (see _ConsistentSet).
    Pickler.save(self, _ConsistentSet(set_items))

  # Route extra types through the overrides above; copied so the base
  # Pickler's shared dispatch table is not mutated.
  dispatch = Pickler.dispatch.copy()
  dispatch[type(len)] = save_global  # builtin
  dispatch[type(object)] = save_global  # type
  dispatch[type(Pickler)] = save_global  # classobj
  dispatch[type(pickle.dump)] = save_global  # function
  dispatch[type(set())] = save_set
121
+
122
+
123
class NumpyHasher(Hasher):
  """Hasher variant that feeds numpy array memory to the digest instead of pickling it."""

  def __init__(self, hash_name="md5"):
    Hasher.__init__(self, hash_name=hash_name)

  def save(self, obj):
    """ Subclass the save method, to hash ndarray subclass, rather
    than pickling them. Off course, this is a total abuse of
    the Pickler class.
    """
    import numpy as np

    if isinstance(obj, np.ndarray) and not obj.dtype.hasobject:
      # Compute a hash of the object
      # The update function of the hash requires a c_contiguous buffer.
      if obj.shape == ():
        # 0d arrays need to be flattened because viewing them as bytes
        # raises a ValueError exception.
        obj_c_contiguous = obj.flatten()
      elif obj.flags.c_contiguous:
        obj_c_contiguous = obj
      elif obj.flags.f_contiguous:
        obj_c_contiguous = obj.T
      else:
        # Cater for non-single-segment arrays: this creates a
        # copy, and thus aleviates this issue.
        # XXX: There might be a more efficient way of doing this
        obj_c_contiguous = obj.flatten()

      # View the array as bytes to support dtypes like datetime64
      self._hash.update(memoryview(obj_c_contiguous.view(np.uint8)))

      # The object will be pickled by the pickler hashed at the end.
      obj = (obj.__class__, ("HASHED", obj.dtype, obj.shape, obj.strides))
    elif isinstance(obj, np.dtype):
      # Atomic dtype objects are interned by their default constructor:
      # np.dtype("f8") is np.dtype("f8")
      # This interning is not maintained by a
      # pickle.loads + pickle.dumps cycle, because __reduce__
      # uses copy=True in the dtype constructor. This
      # non-deterministic behavior causes the internal memoizer
      # of the hasher to generate different hash values
      # depending on the history of the dtype object.
      # To prevent the hash from being sensitive to this, we use
      # .descr which is a full (and never interned) description of
      # the array dtype according to the numpy doc.
      obj = (obj.__class__, ("HASHED", obj.descr))

    Hasher.save(self, obj)
171
+
172
+
173
def hash_obj(obj, hash_name="md5") -> str:
  """Return a deterministic hex digest of *obj* using algorithm *hash_name*."""
  hasher_type = NumpyHasher if has_numpy else Hasher
  return hasher_type(hash_name=hash_name).hash(obj)

# Backwards-compatible alias (intentionally shadows the builtin `hash` here).
hash = hash_obj
@@ -0,0 +1,18 @@
1
+ import json
2
+ import os
3
+ from pathlib import Path
4
+ from typing import Any
5
+
6
def read_json(path: Path) -> Any:
  """Parse and return the JSON document stored at *path*."""
  return json.loads(path.read_text())
9
+
10
def write_json(path: Path, obj: Any) -> None:
  """Serialize *obj* as JSON into the file at *path*, overwriting it.

  Fix: dropped the misleading `return json.dump(...)` — json.dump always
  returns None, and the function is annotated `-> None`.
  """
  with path.open("w") as f:
    json.dump(obj, f)
13
+
14
def clear_console() -> None:
  """Clear the terminal screen (`cls` on Windows, `clear` elsewhere)."""
  command = "cls" if os.name == "nt" else "clear"
  os.system(command)
16
+
17
def console_link(text, url) -> str:
  """Build an OSC 8 escape sequence that renders *text* as a clickable *url*."""
  opener = f"\033]8;;{url}\033\\"
  closer = "\033]8;;\033\\"
  return opener + text + closer
@@ -1,21 +1,15 @@
1
- import os
2
1
  import re
3
- from typing import TypeVar, Iterable, Callable, Any, cast, overload
2
+ from typing import Iterable, Callable, Any, overload
4
3
  from itertools import chain
5
4
 
6
- T = TypeVar('T')
7
- U = TypeVar('U')
8
- K = TypeVar('K')
9
- K1, K2, K3, K4, K5, K6 = TypeVar('K1'), TypeVar('K2'), TypeVar('K3'), TypeVar('K4'), TypeVar('K5'), TypeVar('K6')
10
-
11
- def clear_console():
12
- os.system("cls" if os.name == "nt" else "clear")
13
-
14
- def non_none(obj: T | None) -> T:
5
def non_none[T](obj: T | None) -> T:
  """Return *obj*, asserting at runtime that it is not None (narrows T | None to T)."""
  assert obj is not None
  return obj
17
8
 
18
- def list_split(l: list[T], sep: T) -> list[list[T]]:
9
def as_any(obj: Any) -> Any:
  """Identity function used to erase a value's static type to Any."""
  return obj
11
+
12
+ def list_split[T](l: list[T], sep: T) -> list[list[T]]:
19
13
  l = [sep, *l, sep]
20
14
  split_at = [i for i, x in enumerate(l) if x is sep]
21
15
  ranges = list(zip(split_at[0:-1], split_at[1:]))
@@ -24,17 +18,17 @@ def list_split(l: list[T], sep: T) -> list[list[T]]:
24
18
  for start, end in ranges
25
19
  ]
26
20
 
27
- def drop_none(l: Iterable[T | None]) -> list[T]:
28
- return [x for x in l if x is not None]
21
+ def drop_none[T](iterable: Iterable[T | None]) -> list[T]:
22
+ return [x for x in iterable if x is not None]
29
23
 
30
- def distinct(items: Iterable[T]) -> list[T]:
31
- return list(set(items))
24
+ def distinct[T](items: Iterable[T]) -> list[T]:
25
+ return list(dict.fromkeys(items))
32
26
 
33
- def first(iterable: Iterable[T]) -> T | None:
27
+ def first[T](iterable: Iterable[T]) -> T | None:
34
28
  return next(iter(iterable), None)
35
29
 
36
- def move_value(l: Iterable[T], from_i: int, to_i: int) -> list[T]:
37
- l = list(l)
30
+ def move_value[T](iterable: Iterable[T], from_i: int, to_i: int) -> list[T]:
31
+ l = list(iterable)
38
32
  l.insert(to_i, l.pop(from_i))
39
33
  return l
40
34
 
@@ -53,7 +47,7 @@ def transpose_dict(des):
53
47
  {key: des[key][i] for key in keys}
54
48
  for i in range(length)
55
49
  ]
56
- raise ValueError('transpose_dict only accepts dict or list')
50
+ raise ValueError("transpose_dict only accepts dict or list")
57
51
 
58
52
  def make_combinations_by_dict(des, keys=None, pairs=[]):
59
53
  keys = sorted(des.keys()) if keys == None else keys
@@ -67,7 +61,7 @@ def make_combinations_by_dict(des, keys=None, pairs=[]):
67
61
  for pair in new_pairs
68
62
  ])
69
63
 
70
- def merge_dicts(*dicts: dict[K, T]) -> dict[K, T]:
64
+ def merge_dicts[T, K](*dicts: dict[K, T]) -> dict[K, T]:
71
65
  if len(dicts) == 1:
72
66
  return dicts[0]
73
67
  result = {}
@@ -75,33 +69,33 @@ def merge_dicts(*dicts: dict[K, T]) -> dict[K, T]:
75
69
  result.update(d)
76
70
  return result
77
71
 
78
- def intersect(*lists: Iterable[T]) -> list[T]:
79
- return list(set.intersection(*map(set, lists)))
72
+ def intersect[T](*iterables: Iterable[T]) -> list[T]:
73
+ return list(set.intersection(*map(set, iterables)))
80
74
 
81
- def ensure_tuple(value: T | tuple[T, ...]) -> tuple[T, ...]:
75
+ def ensure_tuple[T](value: T | tuple[T, ...]) -> tuple[T, ...]:
82
76
  return value if isinstance(value, tuple) else (value,)
83
77
 
84
- def key_of(dicts: Iterable[dict[T, U]], key: T) -> list[U]:
78
+ def key_of[T, U](dicts: Iterable[dict[T, U]], key: T) -> list[U]:
85
79
  return [d[key] for d in dicts]
86
80
 
87
- def omit(d: dict[K, T], keys: Iterable[K]) -> dict[K, T]:
81
+ def omit[T, K](d: dict[K, T], keys: Iterable[K]) -> dict[K, T]:
88
82
  if keys:
89
83
  d = dict(d)
90
84
  for key in keys:
91
85
  del d[key]
92
86
  return d
93
87
 
94
- def pick(d: dict[K, T], keys: Iterable[K]) -> dict[K, T]:
88
+ def pick[T, K](d: dict[K, T], keys: Iterable[K]) -> dict[K, T]:
95
89
  return {key: d[key] for key in keys}
96
90
 
97
- def dict_by(keys: Iterable[K], values: Iterable[T]) -> dict[K, T]:
91
+ def dict_by[T, K](keys: Iterable[K], values: Iterable[T]) -> dict[K, T]:
98
92
  return dict(zip(keys, values))
99
93
 
100
- def tuple_by(d: dict[K, T], keys: Iterable[K]) -> tuple[T, ...]:
94
+ def tuple_by[T, K](d: dict[K, T], keys: Iterable[K]) -> tuple[T, ...]:
101
95
  return tuple(d[key] for key in keys)
102
96
 
103
- def flatten(l: Iterable[Iterable[T]]) -> list[T]:
104
- return list(chain.from_iterable(l))
97
+ def flatten[T](iterable: Iterable[Iterable[T]]) -> list[T]:
98
+ return list(chain.from_iterable(iterable))
105
99
 
106
100
  def transpose(tuples, default_num_returns=0):
107
101
  output = tuple(zip(*tuples))
@@ -109,27 +103,21 @@ def transpose(tuples, default_num_returns=0):
109
103
  return ([],) * default_num_returns
110
104
  return tuple(map(list, output))
111
105
 
112
- def map_dict(fn: Callable[[T], U], d: dict[K, T]) -> dict[K, U]:
106
+ def map_dict[T, U, K](fn: Callable[[T], U], d: dict[K, T]) -> dict[K, U]:
113
107
  return {key: fn(value) for key, value in d.items()}
114
108
 
115
109
  @overload
116
- def deepen_dict(d: dict[tuple[K1], U]) -> dict[K1, U]: ...
117
-
110
+ def deepen_dict[K1, U](d: dict[tuple[K1], U]) -> dict[K1, U]: ...
118
111
  @overload
119
- def deepen_dict(d: dict[tuple[K1, K2], U]) -> dict[K1, dict[K2, U]]: ...
120
-
112
+ def deepen_dict[K1, K2, U](d: dict[tuple[K1, K2], U]) -> dict[K1, dict[K2, U]]: ...
121
113
  @overload
122
- def deepen_dict(d: dict[tuple[K1, K2, K3], U]) -> dict[K1, dict[K2, dict[K3, U]]]: ...
123
-
114
+ def deepen_dict[K1, K2, K3, U](d: dict[tuple[K1, K2, K3], U]) -> dict[K1, dict[K2, dict[K3, U]]]: ...
124
115
  @overload
125
- def deepen_dict(d: dict[tuple[K1, K2, K3, K4], U]) -> dict[K1, dict[K2, dict[K3, dict[K4, U]]]]: ...
126
-
116
+ def deepen_dict[K1, K2, K3, K4, U](d: dict[tuple[K1, K2, K3, K4], U]) -> dict[K1, dict[K2, dict[K3, dict[K4, U]]]]: ...
127
117
  @overload
128
- def deepen_dict(d: dict[tuple[K1, K2, K3, K4, K5], U]) -> dict[K1, dict[K2, dict[K3, dict[K4, dict[K5, U]]]]]: ...
129
-
118
+ def deepen_dict[K1, K2, K3, K4, K5, U](d: dict[tuple[K1, K2, K3, K4, K5], U]) -> dict[K1, dict[K2, dict[K3, dict[K4, dict[K5, U]]]]]: ...
130
119
  @overload
131
- def deepen_dict(d: dict[tuple[K1, K2, K3, K4, K5, K6], U]) -> dict[K1, dict[K2, dict[K3, dict[K4, dict[K5, dict[K6, U]]]]]]: ...
132
-
120
+ def deepen_dict[K1, K2, K3, K4, K5, K6, U](d: dict[tuple[K1, K2, K3, K4, K5, K6], U]) -> dict[K1, dict[K2, dict[K3, dict[K4, dict[K5, dict[K6, U]]]]]]: ...
133
121
  def deepen_dict(d: dict[tuple[Any, ...], Any]) -> dict:
134
122
  output = {}
135
123
  if () in d:
@@ -151,31 +139,35 @@ def flatten_dict_inner(d, prefix=()):
151
139
def flatten_dict(deep_dict: dict, prefix=()) -> dict:
  """Flatten a nested dict into one level keyed by tuples of the key path."""
  flat_pairs = flatten_dict_inner(deep_dict, prefix)
  return dict(flat_pairs)
153
141
 
154
- def group(pairs: Iterable[tuple[K, T]]) -> dict[K, list[T]]:
142
+ def group[T, K](pairs: Iterable[tuple[K, T]]) -> dict[K, list[T]]:
155
143
  values_by_key = {}
156
144
  for key, value in pairs:
157
145
  values_by_key.setdefault(key, []).append(value)
158
146
  return values_by_key
159
147
 
160
- def reversed_enumerate(l: list[T] | tuple[T, ...]) -> Iterable[tuple[int, T]]:
148
+ def reversed_enumerate[T](l: list[T] | tuple[T, ...]) -> Iterable[tuple[int, T]]:
161
149
  return zip(reversed(range(len(l))), reversed(l))
162
150
 
163
- def get_at(d: dict, keys: Iterable[Any], default: T) -> T:
151
+ def get_at[T](d: dict, keys: Iterable[Any], default: T) -> T:
164
152
  try:
165
153
  for key in keys:
166
154
  d = d[key]
167
155
  except KeyError:
168
156
  return default
169
- return cast(Any, d)
157
+ return as_any(d)
158
+
159
+ def for_each[T](func: Callable[[T]], iterable: Iterable[T]) -> None:
160
+ for item in iterable:
161
+ func(item)
170
162
 
171
- def sized_partitions(values: Iterable[T], part_size: int) -> list[list[T]]:
163
+ def sized_partitions[T](values: Iterable[T], part_size: int) -> list[list[T]]:
172
164
  # "chunk"
173
165
  if not isinstance(values, list):
174
166
  values = list(values)
175
167
  num_parts = (len(values) / part_size).__ceil__()
176
168
  return [values[i * part_size:(i + 1) * part_size] for i in range(num_parts)]
177
169
 
178
- def num_partitions(values: Iterable[T], num_parts: int) -> list[list[T]]:
170
+ def num_partitions[T](values: Iterable[T], num_parts: int) -> list[list[T]]:
179
171
  if not isinstance(values, list):
180
172
  values = list(values)
181
173
  part_size = (len(values) / num_parts).__ceil__()
relib-1.2.2/uv.lock ADDED
@@ -0,0 +1,132 @@
1
+ version = 1
2
+ requires-python = ">=3.12"
3
+
4
+ [[package]]
5
+ name = "numpy"
6
+ version = "2.1.3"
7
+ source = { registry = "https://pypi.org/simple" }
8
+ sdist = { url = "https://files.pythonhosted.org/packages/25/ca/1166b75c21abd1da445b97bf1fa2f14f423c6cfb4fc7c4ef31dccf9f6a94/numpy-2.1.3.tar.gz", hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761", size = 20166090 }
9
+ wheels = [
10
+ { url = "https://files.pythonhosted.org/packages/8a/f0/385eb9970309643cbca4fc6eebc8bb16e560de129c91258dfaa18498da8b/numpy-2.1.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f55ba01150f52b1027829b50d70ef1dafd9821ea82905b63936668403c3b471e", size = 20849658 },
11
+ { url = "https://files.pythonhosted.org/packages/54/4a/765b4607f0fecbb239638d610d04ec0a0ded9b4951c56dc68cef79026abf/numpy-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13138eadd4f4da03074851a698ffa7e405f41a0845a6b1ad135b81596e4e9958", size = 13492258 },
12
+ { url = "https://files.pythonhosted.org/packages/bd/a7/2332679479c70b68dccbf4a8eb9c9b5ee383164b161bee9284ac141fbd33/numpy-2.1.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a6b46587b14b888e95e4a24d7b13ae91fa22386c199ee7b418f449032b2fa3b8", size = 5090249 },
13
+ { url = "https://files.pythonhosted.org/packages/c1/67/4aa00316b3b981a822c7a239d3a8135be2a6945d1fd11d0efb25d361711a/numpy-2.1.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:0fa14563cc46422e99daef53d725d0c326e99e468a9320a240affffe87852564", size = 6621704 },
14
+ { url = "https://files.pythonhosted.org/packages/5e/da/1a429ae58b3b6c364eeec93bf044c532f2ff7b48a52e41050896cf15d5b1/numpy-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8637dcd2caa676e475503d1f8fdb327bc495554e10838019651b76d17b98e512", size = 13606089 },
15
+ { url = "https://files.pythonhosted.org/packages/9e/3e/3757f304c704f2f0294a6b8340fcf2be244038be07da4cccf390fa678a9f/numpy-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2312b2aa89e1f43ecea6da6ea9a810d06aae08321609d8dc0d0eda6d946a541b", size = 16043185 },
16
+ { url = "https://files.pythonhosted.org/packages/43/97/75329c28fea3113d00c8d2daf9bc5828d58d78ed661d8e05e234f86f0f6d/numpy-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a38c19106902bb19351b83802531fea19dee18e5b37b36454f27f11ff956f7fc", size = 16410751 },
17
+ { url = "https://files.pythonhosted.org/packages/ad/7a/442965e98b34e0ae9da319f075b387bcb9a1e0658276cc63adb8c9686f7b/numpy-2.1.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02135ade8b8a84011cbb67dc44e07c58f28575cf9ecf8ab304e51c05528c19f0", size = 14082705 },
18
+ { url = "https://files.pythonhosted.org/packages/ac/b6/26108cf2cfa5c7e03fb969b595c93131eab4a399762b51ce9ebec2332e80/numpy-2.1.3-cp312-cp312-win32.whl", hash = "sha256:e6988e90fcf617da2b5c78902fe8e668361b43b4fe26dbf2d7b0f8034d4cafb9", size = 6239077 },
19
+ { url = "https://files.pythonhosted.org/packages/a6/84/fa11dad3404b7634aaab50733581ce11e5350383311ea7a7010f464c0170/numpy-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:0d30c543f02e84e92c4b1f415b7c6b5326cbe45ee7882b6b77db7195fb971e3a", size = 12566858 },
20
+ { url = "https://files.pythonhosted.org/packages/4d/0b/620591441457e25f3404c8057eb924d04f161244cb8a3680d529419aa86e/numpy-2.1.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96fe52fcdb9345b7cd82ecd34547fca4321f7656d500eca497eb7ea5a926692f", size = 20836263 },
21
+ { url = "https://files.pythonhosted.org/packages/45/e1/210b2d8b31ce9119145433e6ea78046e30771de3fe353f313b2778142f34/numpy-2.1.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f653490b33e9c3a4c1c01d41bc2aef08f9475af51146e4a7710c450cf9761598", size = 13507771 },
22
+ { url = "https://files.pythonhosted.org/packages/55/44/aa9ee3caee02fa5a45f2c3b95cafe59c44e4b278fbbf895a93e88b308555/numpy-2.1.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dc258a761a16daa791081d026f0ed4399b582712e6fc887a95af09df10c5ca57", size = 5075805 },
23
+ { url = "https://files.pythonhosted.org/packages/78/d6/61de6e7e31915ba4d87bbe1ae859e83e6582ea14c6add07c8f7eefd8488f/numpy-2.1.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:016d0f6f5e77b0f0d45d77387ffa4bb89816b57c835580c3ce8e099ef830befe", size = 6608380 },
24
+ { url = "https://files.pythonhosted.org/packages/3e/46/48bdf9b7241e317e6cf94276fe11ba673c06d1fdf115d8b4ebf616affd1a/numpy-2.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c181ba05ce8299c7aa3125c27b9c2167bca4a4445b7ce73d5febc411ca692e43", size = 13602451 },
25
+ { url = "https://files.pythonhosted.org/packages/70/50/73f9a5aa0810cdccda9c1d20be3cbe4a4d6ea6bfd6931464a44c95eef731/numpy-2.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5641516794ca9e5f8a4d17bb45446998c6554704d888f86df9b200e66bdcce56", size = 16039822 },
26
+ { url = "https://files.pythonhosted.org/packages/ad/cd/098bc1d5a5bc5307cfc65ee9369d0ca658ed88fbd7307b0d49fab6ca5fa5/numpy-2.1.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ea4dedd6e394a9c180b33c2c872b92f7ce0f8e7ad93e9585312b0c5a04777a4a", size = 16411822 },
27
+ { url = "https://files.pythonhosted.org/packages/83/a2/7d4467a2a6d984549053b37945620209e702cf96a8bc658bc04bba13c9e2/numpy-2.1.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0df3635b9c8ef48bd3be5f862cf71b0a4716fa0e702155c45067c6b711ddcef", size = 14079598 },
28
+ { url = "https://files.pythonhosted.org/packages/e9/6a/d64514dcecb2ee70bfdfad10c42b76cab657e7ee31944ff7a600f141d9e9/numpy-2.1.3-cp313-cp313-win32.whl", hash = "sha256:50ca6aba6e163363f132b5c101ba078b8cbd3fa92c7865fd7d4d62d9779ac29f", size = 6236021 },
29
+ { url = "https://files.pythonhosted.org/packages/bb/f9/12297ed8d8301a401e7d8eb6b418d32547f1d700ed3c038d325a605421a4/numpy-2.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:747641635d3d44bcb380d950679462fae44f54b131be347d5ec2bce47d3df9ed", size = 12560405 },
30
+ { url = "https://files.pythonhosted.org/packages/a7/45/7f9244cd792e163b334e3a7f02dff1239d2890b6f37ebf9e82cbe17debc0/numpy-2.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:996bb9399059c5b82f76b53ff8bb686069c05acc94656bb259b1d63d04a9506f", size = 20859062 },
31
+ { url = "https://files.pythonhosted.org/packages/b1/b4/a084218e7e92b506d634105b13e27a3a6645312b93e1c699cc9025adb0e1/numpy-2.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:45966d859916ad02b779706bb43b954281db43e185015df6eb3323120188f9e4", size = 13515839 },
32
+ { url = "https://files.pythonhosted.org/packages/27/45/58ed3f88028dcf80e6ea580311dc3edefdd94248f5770deb980500ef85dd/numpy-2.1.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:baed7e8d7481bfe0874b566850cb0b85243e982388b7b23348c6db2ee2b2ae8e", size = 5116031 },
33
+ { url = "https://files.pythonhosted.org/packages/37/a8/eb689432eb977d83229094b58b0f53249d2209742f7de529c49d61a124a0/numpy-2.1.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f7f672a3388133335589cfca93ed468509cb7b93ba3105fce780d04a6576a0", size = 6629977 },
34
+ { url = "https://files.pythonhosted.org/packages/42/a3/5355ad51ac73c23334c7caaed01adadfda49544f646fcbfbb4331deb267b/numpy-2.1.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7aac50327da5d208db2eec22eb11e491e3fe13d22653dce51b0f4109101b408", size = 13575951 },
35
+ { url = "https://files.pythonhosted.org/packages/c4/70/ea9646d203104e647988cb7d7279f135257a6b7e3354ea6c56f8bafdb095/numpy-2.1.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4394bc0dbd074b7f9b52024832d16e019decebf86caf909d94f6b3f77a8ee3b6", size = 16022655 },
36
+ { url = "https://files.pythonhosted.org/packages/14/ce/7fc0612903e91ff9d0b3f2eda4e18ef9904814afcae5b0f08edb7f637883/numpy-2.1.3-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:50d18c4358a0a8a53f12a8ba9d772ab2d460321e6a93d6064fc22443d189853f", size = 16399902 },
37
+ { url = "https://files.pythonhosted.org/packages/ef/62/1d3204313357591c913c32132a28f09a26357e33ea3c4e2fe81269e0dca1/numpy-2.1.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:14e253bd43fc6b37af4921b10f6add6925878a42a0c5fe83daee390bca80bc17", size = 14067180 },
38
+ { url = "https://files.pythonhosted.org/packages/24/d7/78a40ed1d80e23a774cb8a34ae8a9493ba1b4271dde96e56ccdbab1620ef/numpy-2.1.3-cp313-cp313t-win32.whl", hash = "sha256:08788d27a5fd867a663f6fc753fd7c3ad7e92747efc73c53bca2f19f8bc06f48", size = 6291907 },
39
+ { url = "https://files.pythonhosted.org/packages/86/09/a5ab407bd7f5f5599e6a9261f964ace03a73e7c6928de906981c31c38082/numpy-2.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2564fbdf2b99b3f815f2107c1bbc93e2de8ee655a69c261363a1172a79a257d4", size = 12644098 },
40
+ ]
41
+
42
+ [[package]]
43
+ name = "pandas"
44
+ version = "2.2.3"
45
+ source = { registry = "https://pypi.org/simple" }
46
+ dependencies = [
47
+ { name = "numpy" },
48
+ { name = "python-dateutil" },
49
+ { name = "pytz" },
50
+ { name = "tzdata" },
51
+ ]
52
+ sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 }
53
+ wheels = [
54
+ { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 },
55
+ { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 },
56
+ { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 },
57
+ { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 },
58
+ { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 },
59
+ { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 },
60
+ { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 },
61
+ { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 },
62
+ { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 },
63
+ { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 },
64
+ { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 },
65
+ { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 },
66
+ { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 },
67
+ { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 },
68
+ { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 },
69
+ { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 },
70
+ { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 },
71
+ { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 },
72
+ { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 },
73
+ { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 },
74
+ ]
75
+
76
+ [[package]]
77
+ name = "python-dateutil"
78
+ version = "2.9.0.post0"
79
+ source = { registry = "https://pypi.org/simple" }
80
+ dependencies = [
81
+ { name = "six" },
82
+ ]
83
+ sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
84
+ wheels = [
85
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
86
+ ]
87
+
88
+ [[package]]
89
+ name = "pytz"
90
+ version = "2024.2"
91
+ source = { registry = "https://pypi.org/simple" }
92
+ sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 }
93
+ wheels = [
94
+ { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 },
95
+ ]
96
+
97
+ [[package]]
98
+ name = "relib"
99
+ version = "1.2.2"
100
+ source = { editable = "." }
101
+
102
+ [package.dev-dependencies]
103
+ dev = [
104
+ { name = "numpy" },
105
+ { name = "pandas" },
106
+ ]
107
+
108
+ [package.metadata]
109
+
110
+ [package.metadata.requires-dev]
111
+ dev = [
112
+ { name = "numpy", specifier = ">=2.1.3" },
113
+ { name = "pandas", specifier = ">=2.2.3" },
114
+ ]
115
+
116
+ [[package]]
117
+ name = "six"
118
+ version = "1.16.0"
119
+ source = { registry = "https://pypi.org/simple" }
120
+ sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 }
121
+ wheels = [
122
+ { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 },
123
+ ]
124
+
125
+ [[package]]
126
+ name = "tzdata"
127
+ version = "2024.2"
128
+ source = { registry = "https://pypi.org/simple" }
129
+ sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 }
130
+ wheels = [
131
+ { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 },
132
+ ]
@@ -1,255 +0,0 @@
1
- """
2
- Fast cryptographic hash of Python objects, with a special case for fast
3
- hashing of numpy arrays.
4
- """
5
-
6
- # Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
7
- # Copyright (c) 2009 Gael Varoquaux
8
- # License: BSD Style, 3 clauses.
9
-
10
- import pickle
11
- import hashlib
12
- import sys
13
- import types
14
- import struct
15
- import io
16
- import decimal
17
-
18
- Pickler = pickle._Pickler
19
- _bytes_or_unicode = (bytes, str)
20
-
21
-
22
- class _ConsistentSet(object):
23
- """ Class used to ensure the hash of Sets is preserved
24
- whatever the order of its items.
25
- """
26
- def __init__(self, set_sequence):
27
- # Forces order of elements in set to ensure consistent hash.
28
- try:
29
- # Trying first to order the set assuming the type of elements is
30
- # consistent and orderable.
31
- # This fails on python 3 when elements are unorderable
32
- # but we keep it in a try as it's faster.
33
- self._sequence = sorted(set_sequence)
34
- except (TypeError, decimal.InvalidOperation):
35
- # If elements are unorderable, sorting them using their hash.
36
- # This is slower but works in any case.
37
- self._sequence = sorted((hash(e) for e in set_sequence))
38
-
39
-
40
- class _MyHash(object):
41
- """ Class used to hash objects that won't normally pickle """
42
-
43
- def __init__(self, *args):
44
- self.args = args
45
-
46
-
47
- class Hasher(Pickler):
48
- """ A subclass of pickler, to do cryptographic hashing, rather than
49
- pickling.
50
- """
51
-
52
- def __init__(self, hash_name='md5'):
53
- self.stream = io.BytesIO()
54
- # By default we want a pickle protocol that only changes with
55
- # the major python version and not the minor one
56
- protocol = pickle.HIGHEST_PROTOCOL
57
- Pickler.__init__(self, self.stream, protocol=protocol)
58
- # Initialise the hash obj
59
- self._hash = hashlib.new(hash_name)
60
-
61
- def hash(self, obj, return_digest=True):
62
- try:
63
- self.dump(obj)
64
- except pickle.PicklingError as e:
65
- e.args += ('PicklingError while hashing %r: %r' % (obj, e),)
66
- raise
67
- dumps = self.stream.getvalue()
68
- self._hash.update(dumps)
69
- if return_digest:
70
- return self._hash.hexdigest()
71
-
72
- def save(self, obj):
73
- if isinstance(obj, (types.MethodType, type({}.pop))):
74
- # the Pickler cannot pickle instance methods; here we decompose
75
- # them into components that make them uniquely identifiable
76
- if hasattr(obj, '__func__'):
77
- func_name = obj.__func__.__name__
78
- else:
79
- func_name = obj.__name__
80
- inst = obj.__self__
81
- if type(inst) == type(pickle):
82
- obj = _MyHash(func_name, inst.__name__)
83
- elif inst is None:
84
- # type(None) or type(module) do not pickle
85
- obj = _MyHash(func_name, inst)
86
- else:
87
- cls = obj.__self__.__class__
88
- obj = _MyHash(func_name, inst, cls)
89
- Pickler.save(self, obj)
90
-
91
- def memoize(self, obj):
92
- # We want hashing to be sensitive to value instead of reference.
93
- # For example we want ['aa', 'aa'] and ['aa', 'aaZ'[:2]]
94
- # to hash to the same value and that's why we disable memoization
95
- # for strings
96
- if isinstance(obj, _bytes_or_unicode):
97
- return
98
- Pickler.memoize(self, obj)
99
-
100
- # The dispatch table of the pickler is not accessible in Python
101
- # 3, as these lines are only bugware for IPython, we skip them.
102
- def save_global(self, obj, name=None, pack=struct.pack):
103
- # We have to override this method in order to deal with objects
104
- # defined interactively in IPython that are not injected in
105
- # __main__
106
- kwargs = dict(name=name, pack=pack)
107
- if sys.version_info >= (3, 4):
108
- del kwargs['pack']
109
- try:
110
- Pickler.save_global(self, obj, **kwargs)
111
- except pickle.PicklingError:
112
- Pickler.save_global(self, obj, **kwargs)
113
- module = getattr(obj, "__module__", None)
114
- if module == '__main__':
115
- my_name = name
116
- if my_name is None:
117
- my_name = obj.__name__
118
- mod = sys.modules[module]
119
- if not hasattr(mod, my_name):
120
- # IPython doesn't inject the variables define
121
- # interactively in __main__
122
- setattr(mod, my_name, obj)
123
-
124
- dispatch = Pickler.dispatch.copy()
125
- # builtin
126
- dispatch[type(len)] = save_global
127
- # type
128
- dispatch[type(object)] = save_global
129
- # classobj
130
- dispatch[type(Pickler)] = save_global
131
- # function
132
- dispatch[type(pickle.dump)] = save_global
133
-
134
- def _batch_setitems(self, items):
135
- # forces order of keys in dict to ensure consistent hash.
136
- try:
137
- # Trying first to compare dict assuming the type of keys is
138
- # consistent and orderable.
139
- # This fails on python 3 when keys are unorderable
140
- # but we keep it in a try as it's faster.
141
- Pickler._batch_setitems(self, iter(sorted(items)))
142
- except TypeError:
143
- # If keys are unorderable, sorting them using their hash. This is
144
- # slower but works in any case.
145
- Pickler._batch_setitems(self, iter(sorted((hash(k), v)
146
- for k, v in items)))
147
-
148
- def save_set(self, set_items):
149
- # forces order of items in Set to ensure consistent hash
150
- Pickler.save(self, _ConsistentSet(set_items))
151
-
152
- dispatch[type(set())] = save_set
153
-
154
-
155
- class NumpyHasher(Hasher):
156
- """ Special case the hasher for when numpy is loaded.
157
- """
158
-
159
- def __init__(self, hash_name='md5', coerce_mmap=False):
160
- """
161
- Parameters
162
- ----------
163
- hash_name: string
164
- The hash algorithm to be used
165
- coerce_mmap: boolean
166
- Make no difference between np.memmap and np.ndarray
167
- objects.
168
- """
169
- self.coerce_mmap = coerce_mmap
170
- Hasher.__init__(self, hash_name=hash_name)
171
- # delayed import of numpy, to avoid tight coupling
172
- import numpy as np
173
- self.np = np
174
- if hasattr(np, 'getbuffer'):
175
- self._getbuffer = np.getbuffer
176
- else:
177
- self._getbuffer = memoryview
178
-
179
- def save(self, obj):
180
- """ Subclass the save method, to hash ndarray subclass, rather
181
- than pickling them. Off course, this is a total abuse of
182
- the Pickler class.
183
- """
184
- if isinstance(obj, self.np.ndarray) and not obj.dtype.hasobject:
185
- # Compute a hash of the object
186
- # The update function of the hash requires a c_contiguous buffer.
187
- if obj.shape == ():
188
- # 0d arrays need to be flattened because viewing them as bytes
189
- # raises a ValueError exception.
190
- obj_c_contiguous = obj.flatten()
191
- elif obj.flags.c_contiguous:
192
- obj_c_contiguous = obj
193
- elif obj.flags.f_contiguous:
194
- obj_c_contiguous = obj.T
195
- else:
196
- # Cater for non-single-segment arrays: this creates a
197
- # copy, and thus aleviates this issue.
198
- # XXX: There might be a more efficient way of doing this
199
- obj_c_contiguous = obj.flatten()
200
-
201
- # memoryview is not supported for some dtypes, e.g. datetime64, see
202
- # https://github.com/numpy/numpy/issues/4983. The
203
- # workaround is to view the array as bytes before
204
- # taking the memoryview.
205
- self._hash.update(
206
- self._getbuffer(obj_c_contiguous.view(self.np.uint8)))
207
-
208
- # We store the class, to be able to distinguish between
209
- # Objects with the same binary content, but different
210
- # classes.
211
- if self.coerce_mmap and isinstance(obj, self.np.memmap):
212
- # We don't make the difference between memmap and
213
- # normal ndarrays, to be able to reload previously
214
- # computed results with memmap.
215
- klass = self.np.ndarray
216
- else:
217
- klass = obj.__class__
218
- # We also return the dtype and the shape, to distinguish
219
- # different views on the same data with different dtypes.
220
-
221
- # The object will be pickled by the pickler hashed at the end.
222
- obj = (klass, ('HASHED', obj.dtype, obj.shape, obj.strides))
223
- elif isinstance(obj, self.np.dtype):
224
- # Atomic dtype objects are interned by their default constructor:
225
- # np.dtype('f8') is np.dtype('f8')
226
- # This interning is not maintained by a
227
- # pickle.loads + pickle.dumps cycle, because __reduce__
228
- # uses copy=True in the dtype constructor. This
229
- # non-deterministic behavior causes the internal memoizer
230
- # of the hasher to generate different hash values
231
- # depending on the history of the dtype object.
232
- # To prevent the hash from being sensitive to this, we use
233
- # .descr which is a full (and never interned) description of
234
- # the array dtype according to the numpy doc.
235
- klass = obj.__class__
236
- obj = (klass, ('HASHED', obj.descr))
237
- Hasher.save(self, obj)
238
-
239
-
240
- def hash(obj, hash_name='md5', coerce_mmap=False) -> str:
241
- """ Quick calculation of a hash to identify uniquely Python objects
242
- containing numpy arrays.
243
- Parameters
244
- -----------
245
- hash_name: 'md5' or 'sha1'
246
- Hashing algorithm used. sha1 is supposedly safer, but md5 is
247
- faster.
248
- coerce_mmap: boolean
249
- Make no difference between np.memmap and np.ndarray
250
- """
251
- if 'numpy' in sys.modules:
252
- hasher = NumpyHasher(hash_name=hash_name, coerce_mmap=coerce_mmap)
253
- else:
254
- hasher = Hasher(hash_name=hash_name)
255
- return hasher.hash(obj)
relib-1.2.0/uv.lock DELETED
@@ -1,7 +0,0 @@
1
- version = 1
2
- requires-python = ">=3.12"
3
-
4
- [[package]]
5
- name = "relib"
6
- version = "1.2.0"
7
- source = { editable = "." }
File without changes
File without changes
File without changes