pydasa 0.4.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. pydasa/__init__.py +103 -0
  2. pydasa/_version.py +6 -0
  3. pydasa/analysis/__init__.py +0 -0
  4. pydasa/analysis/scenario.py +584 -0
  5. pydasa/analysis/simulation.py +1158 -0
  6. pydasa/context/__init__.py +0 -0
  7. pydasa/context/conversion.py +11 -0
  8. pydasa/context/system.py +17 -0
  9. pydasa/context/units.py +15 -0
  10. pydasa/core/__init__.py +15 -0
  11. pydasa/core/basic.py +287 -0
  12. pydasa/core/cfg/default.json +136 -0
  13. pydasa/core/constants.py +27 -0
  14. pydasa/core/io.py +102 -0
  15. pydasa/core/setup.py +269 -0
  16. pydasa/dimensional/__init__.py +0 -0
  17. pydasa/dimensional/buckingham.py +728 -0
  18. pydasa/dimensional/fundamental.py +146 -0
  19. pydasa/dimensional/model.py +1077 -0
  20. pydasa/dimensional/vaschy.py +633 -0
  21. pydasa/elements/__init__.py +19 -0
  22. pydasa/elements/parameter.py +218 -0
  23. pydasa/elements/specs/__init__.py +22 -0
  24. pydasa/elements/specs/conceptual.py +161 -0
  25. pydasa/elements/specs/numerical.py +469 -0
  26. pydasa/elements/specs/statistical.py +229 -0
  27. pydasa/elements/specs/symbolic.py +394 -0
  28. pydasa/serialization/__init__.py +27 -0
  29. pydasa/serialization/parser.py +133 -0
  30. pydasa/structs/__init__.py +0 -0
  31. pydasa/structs/lists/__init__.py +0 -0
  32. pydasa/structs/lists/arlt.py +578 -0
  33. pydasa/structs/lists/dllt.py +18 -0
  34. pydasa/structs/lists/ndlt.py +262 -0
  35. pydasa/structs/lists/sllt.py +746 -0
  36. pydasa/structs/tables/__init__.py +0 -0
  37. pydasa/structs/tables/htme.py +182 -0
  38. pydasa/structs/tables/scht.py +774 -0
  39. pydasa/structs/tools/__init__.py +0 -0
  40. pydasa/structs/tools/hashing.py +53 -0
  41. pydasa/structs/tools/math.py +149 -0
  42. pydasa/structs/tools/memory.py +54 -0
  43. pydasa/structs/types/__init__.py +0 -0
  44. pydasa/structs/types/functions.py +131 -0
  45. pydasa/structs/types/generics.py +54 -0
  46. pydasa/validations/__init__.py +0 -0
  47. pydasa/validations/decorators.py +510 -0
  48. pydasa/validations/error.py +100 -0
  49. pydasa/validations/patterns.py +32 -0
  50. pydasa/workflows/__init__.py +1 -0
  51. pydasa/workflows/influence.py +497 -0
  52. pydasa/workflows/phenomena.py +529 -0
  53. pydasa/workflows/practical.py +765 -0
  54. pydasa-0.4.7.dist-info/METADATA +320 -0
  55. pydasa-0.4.7.dist-info/RECORD +58 -0
  56. pydasa-0.4.7.dist-info/WHEEL +5 -0
  57. pydasa-0.4.7.dist-info/licenses/LICENSE +674 -0
  58. pydasa-0.4.7.dist-info/top_level.txt +1 -0
@@ -0,0 +1,774 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Module scht.py
4
+ ===========================================
5
+
6
+ Module to represent the **SCHashTable** data structure for the **Hash Table** in *PyDASA*.
7
+
8
+ Classes:
9
+ **Bucket**: Represents a bucket in the hash table using a single linked list.
10
+ **SCHashTable**: Implements a hash table with separate chaining for collision resolution, supporting dynamic resizing and customizable comparison functions.
11
+
12
+ *IMPORTANT:* based on the implementations proposed by the following authors/books:
13
+
14
+ # . Algorithms, 4th Edition, Robert Sedgewick and Kevin Wayne.
15
+ # . Data Structure and Algorithms in Python, M.T. Goodrich, R. Tamassia, M.H. Goldwasser.
16
+ """
17
+
18
+ # native python modules
19
+ # dataclass imports
20
+ from dataclasses import dataclass
21
+ # data type imports
22
+ from typing import List, Optional, Callable, Generic, Any
23
+ from typing import cast
24
+ # code inspection imports
25
+ import inspect
26
+ # hashing support imports
27
+ import random
28
+
29
+ # custom modules
30
+ # data structure classes imports
31
+ from pydasa.structs.tables.htme import MapEntry
32
+ from pydasa.structs.lists.arlt import ArrayList
33
+ from pydasa.structs.lists.sllt import SingleLinkedList
34
+ # hashing support imports
35
+ from pydasa.structs.tools.hashing import mad_hash
36
+ from pydasa.structs.tools.math import next_prime, previous_prime
37
+ # generic types and global variables
38
+ from pydasa.structs.types.generics import T
39
+ from pydasa.structs.types.generics import DFLT_DICT_KEY
40
+ from pydasa.structs.types.generics import VLD_IOTYPE_LT
41
+ from pydasa.structs.types.generics import DFLT_PRIME
42
+ from pydasa.structs.types.functions import dflt_cmp_function_ht
43
+ # generic error handling and type checking
44
+ from pydasa.validations.error import handle_error as error
45
+
46
+ # checking custom modules
47
+ assert MapEntry
48
+ assert ArrayList
49
+ assert SingleLinkedList
50
+ assert mad_hash
51
+ assert next_prime
52
+ assert previous_prime
53
+ assert T
54
+ assert DFLT_DICT_KEY
55
+ assert VLD_IOTYPE_LT
56
+ assert DFLT_PRIME
57
+ assert dflt_cmp_function_ht
58
+ assert error
59
+
60
# default load factor for separate chaining
# :data: DFLT_SC_ALPHA
DFLT_SC_ALPHA: float = 4.0
"""
Default load factor (*alpha*) for the *SCHashTable*, by default is 4.0.
"""

# upper load-factor bound; crossing it triggers a grow-rehash in insert()
# :data: MAX_SC_ALPHA
MAX_SC_ALPHA: float = 8.0
"""
Maximum load factor (*alpha*) for the *SCHashTable*, by default is 8.0.
"""

# lower load-factor bound; dropping below it triggers a shrink-rehash in delete()
# :data: MIN_SC_ALPHA
MIN_SC_ALPHA: float = 2.0
"""
Minimum load factor (*alpha*) for the *SCHashTable*, by default is 2.0.
"""
78
+
79
+
80
@dataclass
class Bucket(Generic[T], SingleLinkedList[MapEntry[T]]):
    """**Bucket** class to represent a bucket in the **Hash Table** with the
    *Separate Chaining* method. The structure is based (inherits) on a custom
    singly linked list (*SingleLinkedList*) of *MapEntry* objects for *PyDASA*.

    Args:
        SingleLinkedList (dataclass): *PyDASA* custom class for a single linked list.
        Generic (T): Generic type for a Python data structure.
    """

    def __str__(self) -> str:
        """*__str__()* function to return a string representation of the
        *Bucket*. It delegates to the inherited *SingleLinkedList.__str__()*.

        Returns:
            str: string representation of the *Bucket*.
        """
        _str = super().__str__()
        return _str

    def __repr__(self) -> str:
        """*__repr__()* function to return a string representation of the
        *Bucket*. It delegates to the inherited *SingleLinkedList.__repr__()*.

        Returns:
            str: string representation of the *Bucket*.
        """
        _str = super().__repr__()
        return _str
106
+
107
+
108
@dataclass
class SCHashTable(Generic[T]):
    """**SCHashTable** implements a hash table with *Separate Chaining* for
    collision resolution. Slots are selected with the MAD compression function
    *h(k) = ((a*k + b) mod P) mod M* and each slot holds a *Bucket* (a singly
    linked list of *MapEntry* key-value pairs) inside an *ArrayList*. Supports
    dynamic resizing and customizable comparison functions.
    """

    # boolean to indicate if the hash table can be rehashed
    # :attr: rehashable
    rehashable: bool = True
    """
    Boolean to indicate if the hash table can be rehashed. By default is True.
    """

    # reserved space for the hash table
    # :attr: nentries
    nentries: int = 1
    """
    Initial number of entries (n) for the hash table. By default is 1, but should be set according to the number of entries expected to be stored.

    NOTE: the reserved space (n) is NOT the capacity (M) of the hash table.
    """

    # starting capacity (M|m) for the hash table
    # :attr: mcapacity
    mcapacity: int = 1
    """
    The capacity (M) of the hash table. By default is 1, but should be set according to the number of entries expected to be stored.
    """

    # starting load factor (alpha) for the hash table
    # :attr: alpha
    alpha: Optional[float] = DFLT_SC_ALPHA
    """
    Load factor (*alpha*) for the hash table. By default is 4.0.

    NOTE: alpha = n/M (n: number of expected entries, M: capacity of the hash table).
    """

    # the cmp_function is used to compare entries, not defined by default
    # :attr: cmp_function
    cmp_function: Optional[Callable[[Any, MapEntry[T]], int]] = None
    """
    Customizable comparison function for *SCHashTable* and its *MapEntry* objects. Defaults to *dflt_cmp_function_ht()* from *PyDASA*, but can be overridden by the user.
    """

    # actual place to store the entries in the hash table
    # :attr: hash_table
    hash_table: Optional[ArrayList[Bucket[T]]] = None
    """
    Index of the hash table where the *Buckets* are stored. By default is an empty *ArrayList* initialized with the configured capacity (M).
    """

    # the key is used to compare entries, not defined by default
    # :attr: key
    key: Optional[str] = DFLT_DICT_KEY
    """
    Customizable key name for identifying elements in the *SCHashTable*. Defaults to *DFLT_DICT_KEY = '_id'* from *PyDASA*, but can be overridden by the user.
    """

    # prime number (P) for the MAD compression function
    # :attr: prime
    prime: int = DFLT_PRIME
    """
    Prime number (P) for the MAD compression function. By default is 109345121, but can be overridden by the user.

    NOTE: the MAD compression function is: *h(k) = ((a*k + b) mod P) mod M*, where *a* and *b* are two random integers, *P* is a prime number and *M* is the hash table capacity.
    """

    # private scale (a) factor for the mad compression function
    # :attr: _scale
    _scale: int = 1
    """
    MAD compression function scale factor (a). By default is 1, but can be overridden by the user.
    """

    # private shift (b) factor for the mad compression function
    # :attr: _shift
    _shift: int = 0
    """
    MAD compression function shift factor (b). By default is 0, but can be overridden by the user.
    """

    # current factor (alpha) for the working hash table
    # :attr: _cur_alpha
    _cur_alpha: float = 0.0
    """
    Current load factor (*alpha*) for the hash table. By default is 0.0, and it updates with each operation that modifies the structure.
    """

    # minimum load factor (alpha) for the hash table
    # :attr: min_alpha
    min_alpha: float = MIN_SC_ALPHA
    """
    Minimum load factor (*alpha*) for the hash table. By default is 2.0. But can be overridden by the user.
    """

    # maximum load factor (alpha) for the hash table
    # :attr: max_alpha
    max_alpha: float = MAX_SC_ALPHA
    """
    Maximum load factor (*alpha*) for the hash table. By default is 8.0. But can be overridden by the user.
    """

    # actual number of used entries (n) in the hash table
    # :attr: _size
    _size: int = 0
    """
    Number of entries (*n*) in the hash table. By default is 0, but it updates with each operation that modifies the structure.
    """

    # number of collisions recorded in the hash table
    # :attr: _collisions
    _collisions: int = 0
    """
    Number of collisions in the hash table. By default is 0, but it updates with each operation that modifies the structure.
    """

    # the type of the entry keys in the hash table
    # :attr: _key_type
    _key_type: Optional[type] = None
    """
    Data type for the keys of the *MapEntry* (key-value pair) that contains the hash table, by default is *None* and is configured when loading the first record.
    """

    # the type of the entry values in the hash table
    # :attr: _value_type
    _value_type: Optional[type] = None
    """
    Data type for the values of the *MapEntry* (key-value pair) that contains the hash table, by default is *None* and is configured when loading the first record.
    """

    # input elements from python list
    # :attr: iodata
    iodata: Optional[List[T]] = None
    """
    Optional Python list for loading external data into the *SCHashTable*. Defaults to *None* but can be provided during creation.
    """
241
+
242
    def __post_init__(self) -> None:
        """*__post_init__()* Initializes the *SCHashTable* after creation: it
        derives the capacity (M) from *nentries* and *alpha*, draws the random
        MAD factors, installs the default comparison function if none was
        given, builds the M empty *Buckets*, and loads any *iodata* provided.

        *NOTE:* Special method called automatically after object creation.

        Raises:
            Exception: any error raised during setup is reported through
                *_error_handler()* and then re-raised.
        """
        try:
            # Ensure alpha has a valid value
            alpha = self.alpha if self.alpha is not None else DFLT_SC_ALPHA

            # capacity derived from alpha = n/M, rounded up to a prime M
            self.mcapacity = next_prime(int(self.nentries / alpha))
            # random scale (a) and shift (b) for MAD: h(k) = ((a*k + b) mod P) mod M
            self._scale = random.randint(1, self.prime - 1)
            self._shift = random.randint(0, self.prime - 1)

            # setting the compare function
            if self.cmp_function is None:
                self.cmp_function = self.default_compare

            # initializing new hash table with explicit type parameter
            self.hash_table = ArrayList[Bucket[T]](cmp_function=None,
                                                   key=self.key)

            # building buckets in the hash table
            for _ in range(self.mcapacity):
                # bucket needs cmp_function for index_of() searches
                _bucket = Bucket[T](cmp_function=self.cmp_function,
                                    key=self.key)
                # add the bucket to the hash table
                self.hash_table.append(_bucket)

            # setting the current load factor
            self._cur_alpha = self._size / self.mcapacity

            # checking the external input data type
            if self.iodata is not None and isinstance(self.iodata, VLD_IOTYPE_LT):
                for entry in self.iodata:
                    # dict records are keyed by their self.key field
                    if isinstance(entry, dict):
                        _key = entry.get(self.key)
                        if _key is not None:
                            self.insert(_key, entry)
                    # otherwise, the element doubles as its own key
                    else:
                        self.insert(entry, entry)
                # clean input data
                self.iodata = None

        except Exception as err:
            self._error_handler(err)
            raise  # Re-raise after handling
294
+
295
+ def default_compare(self, key1: Any, entry2: MapEntry[T]) -> int:
296
+ """*default_compare()* Default comparison function for the *SCHashTable* and its *MapEntry* objects. Compares the key of the *MapEntry* with the provided key *key1* and reurns:
297
+ - 0 if they are equal.
298
+ - 1 if the *MapEntry* key is less than *key1*.
299
+ - -1 if the *MapEntry* key is greater than *key1*.
300
+
301
+ Args:
302
+ key1 (Hashable): Key from the first *MapEntry* to compare.
303
+ entry2 (MapEntry): Second *MapEntry* to compare.
304
+
305
+ Returns:
306
+ int: Comparison result.
307
+ """
308
+ try:
309
+ # default comparison needs the key to be defined
310
+ if self.key is None:
311
+ raise ValueError("Key must be set before comparison")
312
+ return dflt_cmp_function_ht(key1, entry2, self.key)
313
+ except Exception as err:
314
+ self._error_handler(err)
315
+ raise # Re-raise the exception after handling
316
+
317
+ @property
318
+ def size(self) -> int:
319
+ """*size* Property to retrieve the number if entries (n) in the *SCHashTable*.
320
+ Returns:
321
+ int: Number of entries (n) in the *SCHashTable*.
322
+ """
323
+ return self._size
324
+
325
+ @property
326
+ def empty(self) -> bool:
327
+ """*empty* Property to check if the *SCHashTable* has entries or not.
328
+
329
+ Returns:
330
+ bool: True if the *SCHashTable* is empty, False otherwise.
331
+ """
332
+ return self._size == 0
333
+
334
+ @property
335
+ def collisions(self) -> int:
336
+ """*collisions* Property to retrieve the number of collisions in the *SCHashTable*.
337
+
338
+ Returns:
339
+ int: Number of collisions in the *SCHashTable*.
340
+ """
341
+ return self._collisions
342
+
343
+ def clear(self) -> None:
344
+ """*clear()* function to reset the *SCHashTable* to its initial state. It clears all the entries in the hash table and resets the size, collisions and current load factor.
345
+ """
346
+ # reset the size, collisions and current load factor
347
+ self._size = 0
348
+ self._collisions = 0
349
+ self._cur_alpha = 0
350
+ # clear the bukets in the hash table
351
+ if self.hash_table is not None:
352
+ for _bucket in self.hash_table:
353
+ _bucket.clear()
354
+ # clear the hash table itself
355
+ self.hash_table.clear()
356
+
357
+ def insert(self, key: T, value: T) -> None:
358
+ """*insert()* adds a new entry to the *SCHashTable*. It creates a new *MapEntry* object with the key-value pair.
359
+
360
+ Args:
361
+ key (T): key for the entry.
362
+ value (T): value for the entry.
363
+ """
364
+ # create a new entry for the hash table
365
+ _new_entry = MapEntry(key, value)
366
+ _idx = -1
367
+ # cheking the type of the entry
368
+ if self._validate_type(_new_entry):
369
+ # get the hash key for the entry
370
+ _hash = mad_hash(key,
371
+ self._scale,
372
+ self._shift,
373
+ self.prime,
374
+ self.mcapacity)
375
+
376
+ if self.hash_table is not None:
377
+ # checking the bucket
378
+ _bucket = self.hash_table.get(_hash)
379
+ # check if the bucket is empty
380
+ if not _bucket.empty:
381
+ # Create temporary MapEntry for searching, because cmp_function needs MapEntry
382
+ _temp_entry = MapEntry(key, None)
383
+ _idx = _bucket.index_of(_temp_entry)
384
+ # the entry is not in the bucket, add it and a collision
385
+ # the entry is already in the bucket, update it
386
+ if _idx > -1:
387
+ _bucket.update(_new_entry, _idx)
388
+ # otherwise, is a new entry
389
+ else:
390
+ if _bucket.size >= 1:
391
+ self._collisions += 1
392
+ _bucket.append(_new_entry)
393
+ self._size += 1
394
+ self._cur_alpha = self._size / self.mcapacity
395
+ # check if the structure needs to be rehashed
396
+ if self._cur_alpha >= self.max_alpha:
397
+ self.resize()
398
+
399
+ def get_entry(self, key: T) -> Optional[MapEntry]:
400
+ """*get_entry()* retrieves an entry from the *SCHashTable* using the provided key.
401
+
402
+ Args:
403
+ key (T): key for the entry.
404
+
405
+ Raises:
406
+ IndexError: error if the *SCHashTable* is empty.
407
+
408
+ Returns:
409
+ Optional[MapEntry]: *MapEntry* object with the key-value pair if found, None otherwise.
410
+ """
411
+ if self.empty:
412
+ raise IndexError("Empty data structure")
413
+ # assume the entry is not in the structure
414
+ entry = None
415
+ idx = -1
416
+ # get the hash key for the entry
417
+ _hash = mad_hash(key,
418
+ self._scale,
419
+ self._shift,
420
+ self.prime,
421
+ self.mcapacity)
422
+
423
+ if self.hash_table is not None:
424
+ # checking the bucket
425
+ _bucket = self.hash_table.get(_hash)
426
+ # check if the bucket is empty
427
+ if not _bucket.empty:
428
+ # Create temporary MapEntry for searching, because cmp_function needs MapEntry
429
+ _temp_entry = MapEntry(key, None)
430
+ idx = _bucket.index_of(_temp_entry)
431
+ # if the entry is in the bucket, return it
432
+ if idx > -1:
433
+ entry = _bucket.get(idx)
434
+
435
+ # otherwise, return entry
436
+ return entry
437
+
438
+ def get_bucket(self, key: T) -> Optional[Bucket]:
439
+ """*get_bucket()* retrieves the bucket containing the key-value pair from the *SCHashTable* using the provided key.
440
+
441
+ Args:
442
+ key (T): key for the entry.
443
+
444
+ Raises:
445
+ IndexError: error if the *SCHashTable* is empty.
446
+
447
+ Returns:
448
+ Optional[Bucket]: *Bucket* object containing the key-value pair if found, None otherwise.
449
+ """
450
+ if self.empty:
451
+ raise IndexError("Empty data structure")
452
+ # assume the entry is not in the structure
453
+ _bucket = None
454
+ # get the hash key for the entry
455
+ _hash = mad_hash(key,
456
+ self._scale,
457
+ self._shift,
458
+ self.prime,
459
+ self.mcapacity)
460
+
461
+ # recover the bucket
462
+ if self.hash_table is not None:
463
+ _bucket = self.hash_table.get(_hash)
464
+ # ceck if the bucket is empty
465
+ if _bucket.empty:
466
+ _bucket = None
467
+
468
+ # otherwise, return the bucket
469
+ return _bucket
470
+
471
+ def is_present(self, key: T) -> bool:
472
+ """*is_present()* checks if the provided key is present in the *SCHashTable*.
473
+
474
+ Args:
475
+ key (T): key for the entry.
476
+
477
+ Raises:
478
+ IndexError: error if the *SCHashTable* is empty.
479
+
480
+ Returns:
481
+ bool: True if the key is present in the *SCHashTable*, False otherwise.
482
+ """
483
+ if self.empty:
484
+ raise IndexError("Empty data structure")
485
+ # assume the entry is not in the structure
486
+ found = False
487
+ # use the MAD compression function to get the hash key
488
+ _hash = mad_hash(key,
489
+ self._scale,
490
+ self._shift,
491
+ self.prime,
492
+ self.mcapacity)
493
+
494
+ if self.hash_table is not None:
495
+ # look into the bucket
496
+ _bucket = self.hash_table.get(_hash)
497
+ # Create temporary MapEntry for searching
498
+ _temp_entry = MapEntry(key, None)
499
+ _idx = _bucket.index_of(_temp_entry)
500
+ # if the entry is in the bucket, return True
501
+ if _idx > -1:
502
+ found = True
503
+
504
+ # otherwise, return foin flag
505
+ return found
506
+
507
+ def delete(self, key: T) -> Optional[MapEntry]:
508
+ """*delete()* removes an entry from the *SCHashTable* using the provided key.
509
+
510
+ Args:
511
+ key (T): key for the entry.
512
+
513
+ Raises:
514
+ IndexError: error if the *SCHashTable* is empty.
515
+
516
+ Returns:
517
+ Optional[MapEntry]: *MapEntry* object with the key-value pair if found, None otherwise.
518
+ """
519
+ if self.empty:
520
+ raise IndexError("Empty data structure")
521
+ # assume the entry is not in the structure
522
+ _entry = None
523
+ _idx = -1
524
+ # get the hash key for the entry
525
+ _hash = mad_hash(key,
526
+ self._scale,
527
+ self._shift,
528
+ self.prime,
529
+ self.mcapacity)
530
+
531
+ if self.hash_table is not None:
532
+ # checking the bucket
533
+ _bucket = self.hash_table.get(_hash)
534
+ # check if the bucket is not empty
535
+ if not _bucket.empty:
536
+ # Create temporary MapEntry for searching
537
+ _temp_entry = MapEntry(key, None)
538
+ _idx = _bucket.index_of(_temp_entry)
539
+ # if the entry is in the bucket, remove it
540
+ if _idx > -1:
541
+ _entry = _bucket.remove(_idx)
542
+ self.hash_table.update(_bucket, _hash)
543
+ # updating collisions
544
+ if _bucket.size > 1:
545
+ self._collisions -= 1
546
+ # updating size and alpha
547
+ self._size -= 1
548
+ self._cur_alpha = self._size / self.mcapacity
549
+ # TODO old code, check if needed
550
+ # elif _idx == -1:
551
+ # raise IndexError(f"Entry for Key: {key} not found")
552
+ if self._cur_alpha < self.min_alpha:
553
+ self.resize()
554
+ return _entry
555
+
556
+ def keys(self) -> SingleLinkedList[T]:
557
+ """*keys()* returns a single linked list of keys from the *SCHashTable*.
558
+
559
+ Returns:
560
+ SingleLinkedList[T]: list of keys from the *SCHashTable*. e.g. [key1, key2, ...].
561
+ """
562
+ _keys_lt = SingleLinkedList(key=self.key)
563
+ if self.hash_table is not None:
564
+ for _bucket in self.hash_table:
565
+ if not _bucket.empty:
566
+ for _entry in _bucket:
567
+ _keys_lt.append(_entry.key)
568
+ return _keys_lt
569
+
570
+ def values(self) -> SingleLinkedList[T]:
571
+ """*values()* returns a single linked list of values from the *SCHashTable*.
572
+
573
+ Returns:
574
+ SingleLinkedList[T]: list of values from the *SCHashTable*. e.g. [value1, value2, ...].
575
+ """
576
+ _values_lt = SingleLinkedList(key=self.key)
577
+ if self.hash_table is not None:
578
+ for _bucket in self.hash_table:
579
+ if not _bucket.empty:
580
+ for _entry in _bucket:
581
+ _values_lt.append(_entry.value)
582
+ return _values_lt
583
+
584
+ def entries(self) -> SingleLinkedList[T]:
585
+ """*entries() returns a list of tuples with the key and value of each entry in the hash table.
586
+
587
+ Returns:
588
+ SingleLinkedList[T]: list of tuples with the key-value paor of each entry in the hash table. e.g. [(key1, value1), (key2, value2), ...].
589
+ """
590
+ _entries_lt = SingleLinkedList(key=self.key)
591
+ if self.hash_table is not None:
592
+ for _bucket in self.hash_table:
593
+ if not _bucket.empty:
594
+ for _entry in _bucket:
595
+ _data = (_entry.key, _entry.value)
596
+ _entries_lt.append(_data)
597
+ return _entries_lt
598
+
599
    def resize(self) -> None:
        """*resize()* rehashes the *SCHashTable*: it picks a new prime
        capacity (M) according to the current load factor (doubling when
        *max_alpha* was reached, halving when below *min_alpha*), builds a
        fresh bucket array, and re-inserts every entry from the old table.
        Size, collisions and the current load factor are rebuilt by the
        re-insertions. No-op when *rehashable* is False.
        """
        try:
            # check if the structure is rehashable
            if self.rehashable:
                # start from the current capacity to avoid null errors
                new_capacity = self.mcapacity
                # grow: double and round up to the next prime
                if self._cur_alpha >= self.max_alpha:
                    new_capacity = next_prime(self.mcapacity * 2)
                # shrink: halve and round up to the next prime
                # NOTE(review): previous_prime is imported but unused here;
                # next_prime(m // 2) still shrinks -- confirm intended
                elif self._cur_alpha < self.min_alpha:
                    new_capacity = next_prime(self.mcapacity // 2)

                # assigning the new capacity
                self.mcapacity = new_capacity

                # counters must restart BEFORE re-insertion: the insert()
                # calls below rebuild size, collisions and cur_alpha
                self._size = 0
                self._collisions = 0
                self._cur_alpha = 0

                # creating the new hash table
                new_table = ArrayList[Bucket[T]](cmp_function=None,
                                                 key=self.key)
                # keep in memory the old hash table
                old_table = self.hash_table

                # create the empty buckets in the new hash table
                i = 0
                while i < self.mcapacity:
                    # bucket is a SingleLinkedList list
                    bucket = Bucket(cmp_function=self.cmp_function,
                                    key=self.key)
                    new_table.append(bucket)
                    i += 1

                # swap in the new table so insert() hashes against it
                self.hash_table = new_table

                if old_table is not None:
                    # re-insert every entry from the old table
                    for bucket in old_table:
                        if not bucket.empty:
                            for entry in bucket:
                                key = entry.key
                                value = entry.value
                                if None not in (key, value):
                                    self.insert(cast(T, key), cast(T, value))
        except Exception as err:
            # NOTE(review): unlike sibling methods, the error is reported but
            # NOT re-raised here -- confirm this best-effort behavior is intended
            self._error_handler(err)
652
+
653
+ def _error_handler(self, err: Exception) -> None:
654
+ """*_error_handler()* to process the context (package/class), function name (method), and the error (exception) that was raised to format a detailed error message and traceback.
655
+
656
+ Args:
657
+ err (Exception): Python raised exception.
658
+ """
659
+ _context = self.__class__.__name__
660
+ _function_name = "unknown"
661
+ frame = inspect.currentframe()
662
+ if frame is not None:
663
+ if frame.f_back is not None:
664
+ _function_name = frame.f_back.f_code.co_name
665
+ else:
666
+ _function_name = "unknown"
667
+ error(_context, _function_name, err)
668
+
669
+ def _validate_type(self, entry: MapEntry) -> bool:
670
+ """*_validate_type()* validates the type of the *MapEntry* against the expected type in the *SCHashTable*. It raises a *TypeError* if the types do not match.
671
+
672
+ Args:
673
+ entry (MapEntry): *MapEntry* object to validate.
674
+
675
+ Raises:
676
+ TypeError: error if the type of the *MapEntry* does not match the expected type in the *SCHashTable*.
677
+
678
+ Returns:
679
+ bool: True if the type of the *MapEntry* matches the expected type in the *SCHashTable*, False otherwise.
680
+ """
681
+ # TODO check usability of this function
682
+ # if datastruct is empty, set the entry type
683
+ key = entry.key
684
+ value = entry.value
685
+ if self.empty:
686
+ self._key_type = type(key)
687
+ self._value_type = type(value)
688
+ # check if the new entry is the same type as the other entries
689
+ valid = self._validate_key_type(entry)
690
+ valid = valid and self._validate_value_type(entry)
691
+ # otherwise, the type is valid
692
+ return valid
693
+
694
+ def _validate_key_type(self, entry: MapEntry) -> bool:
695
+ """*_validate_key_type()* validates the type of the key in the *MapEntry* against the expected type in the *SCHashTable*. It raises a *TypeError* if the types do not match.
696
+
697
+ Args:
698
+ entry (MapEntry): *MapEntry* object to validate.
699
+
700
+ Raises:
701
+ TypeError: error if the type of the key in the *MapEntry* does not match the expected type in the *SCHashTable*.
702
+
703
+ Returns:
704
+ bool: True if the type of the key in the *MapEntry* matches the expected type in the *SCHashTable*, False otherwise.-
705
+ """
706
+ key = entry.key
707
+ # if the new entry is the same type as the other entries
708
+ if self._key_type is not type(key):
709
+ err_msg = f"Invalid key type: {type(key)} "
710
+ err_msg += f"for struct configured with type: {self._key_type}"
711
+ raise TypeError(err_msg)
712
+ # otherwise, the type is valid
713
+ return True
714
+
715
+ def _validate_value_type(self, entry: MapEntry) -> bool:
716
+ """*_validate_value_type()* validates the type of the value in the *MapEntry* against the expected type in the *SCHashTable*. It raises a *TypeError* if the types do not match.
717
+
718
+ Args:
719
+ entry (MapEntry): *MapEntry* object to validate.
720
+
721
+ Raises:
722
+ TypeError: error if the type of the value in the *MapEntry* does not match the expected type in the *SCHashTable*.
723
+
724
+ Returns:
725
+ bool: True if the type of the value in the *MapEntry* matches the expected type in the *SCHashTable*, False otherwise.
726
+ """
727
+ value = entry.value
728
+ # if the new entry is the same type as the other entries
729
+ if self._value_type is not type(value):
730
+ err_msg = f"Invalid value type: {type(value)} "
731
+ err_msg += f"for struct configured with type: {self._value_type}"
732
+ raise TypeError(err_msg)
733
+ # otherwise, the type is valid
734
+ return True
735
+
736
+ def __len__(self) -> int:
737
+ """*__len__()* function to return the number of entries (n) in the *SCHashTable*.
738
+
739
+ Returns:
740
+ int: Number of entries (n) in the *SCHashTable*.
741
+ """
742
+ return self._size
743
+
744
+ def __str__(self) -> str:
745
+ """*__str__()* function to return a string representation of the *SCHashTable*.
746
+
747
+ Returns:
748
+ str: string representation of the *SCHashTable*.
749
+ """
750
+ _attr_lt = []
751
+ for attr, value in vars(self).items():
752
+ # Skip private attributes starting with "__"
753
+ if attr.startswith("__"):
754
+ continue
755
+ # Format callable attributes
756
+ if callable(value):
757
+ try:
758
+ value = f"{value.__name__}{inspect.signature(value)}"
759
+ except ValueError:
760
+ value = repr(value) # Fallback for non-standard callables
761
+ # Format attribute name and value
762
+ _attr_name = attr.lstrip("_")
763
+ _attr_lt.append(f"{_attr_name}={repr(value)}")
764
+ # Format the string representation of the ArrayList class and its attributes
765
+ _str = f"{self.__class__.__name__}({', '.join(_attr_lt)})"
766
+ return _str
767
+
768
+ def __repr__(self) -> str:
769
+ """*__repr__()* function to return a string representation of the *SCHashTable*.
770
+
771
+ Returns:
772
+ str: string representation of the *SCHashTable*.
773
+ """
774
+ return self.__str__()