mapFolding 0.3.8__py3-none-any.whl → 0.3.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. mapFolding/__init__.py +56 -0
  2. mapFolding/basecamp.py +55 -0
  3. mapFolding/beDRY.py +376 -0
  4. mapFolding/oeis.py +339 -0
  5. mapFolding/someAssemblyRequired/__init__.py +2 -0
  6. {someAssemblyRequired → mapFolding/someAssemblyRequired}/makeJob.py +4 -5
  7. mapFolding/someAssemblyRequired/synthesizeJobNumba.py +383 -0
  8. mapFolding/someAssemblyRequired/synthesizeModuleJAX.py +29 -0
  9. {someAssemblyRequired → mapFolding/someAssemblyRequired}/synthesizeModulesNumba.py +186 -99
  10. syntheticModules/numbaInitialize.py → mapFolding/syntheticModules/numba_countInitialize.py +6 -9
  11. syntheticModules/numbaParallel.py → mapFolding/syntheticModules/numba_countParallel.py +4 -4
  12. syntheticModules/numbaSequential.py → mapFolding/syntheticModules/numba_countSequential.py +5 -5
  13. mapFolding/syntheticModules/numba_doTheNeedful.py +30 -0
  14. mapFolding/theDao.py +213 -0
  15. mapFolding/theSSOT.py +251 -0
  16. mapFolding/theSSOTnumba.py +115 -0
  17. mapFolding-0.3.10.dist-info/LICENSE +407 -0
  18. {mapFolding-0.3.8.dist-info → mapFolding-0.3.10.dist-info}/METADATA +9 -11
  19. mapFolding-0.3.10.dist-info/RECORD +40 -0
  20. mapFolding-0.3.10.dist-info/top_level.txt +2 -0
  21. tests/__init__.py +1 -0
  22. tests/conftest.py +183 -0
  23. tests/conftest_tmpRegistry.py +62 -0
  24. tests/conftest_uniformTests.py +53 -0
  25. tests/test_oeis.py +141 -0
  26. tests/test_other.py +259 -0
  27. tests/test_tasks.py +44 -0
  28. tests/test_types.py +5 -0
  29. benchmarks/benchmarking.py +0 -67
  30. citations/constants.py +0 -3
  31. citations/updateCitation.py +0 -354
  32. mapFolding-0.3.8.dist-info/RECORD +0 -26
  33. mapFolding-0.3.8.dist-info/top_level.txt +0 -5
  34. someAssemblyRequired/__init__.py +0 -1
  35. someAssemblyRequired/synthesizeModuleJobNumba.py +0 -212
  36. syntheticModules/__init__.py +0 -3
  37. {reference → mapFolding/reference}/flattened.py +0 -0
  38. {reference → mapFolding/reference}/hunterNumba.py +0 -0
  39. {reference → mapFolding/reference}/irvineJavaPort.py +0 -0
  40. {reference → mapFolding/reference}/jax.py +0 -0
  41. {reference → mapFolding/reference}/lunnan.py +0 -0
  42. {reference → mapFolding/reference}/lunnanNumpy.py +0 -0
  43. {reference → mapFolding/reference}/lunnanWhile.py +0 -0
  44. {reference → mapFolding/reference}/rotatedEntryPoint.py +0 -0
  45. {reference → mapFolding/reference}/total_countPlus1vsPlusN.py +0 -0
  46. {someAssemblyRequired → mapFolding/someAssemblyRequired}/getLLVMforNoReason.py +0 -0
  47. {mapFolding-0.3.8.dist-info → mapFolding-0.3.10.dist-info}/WHEEL +0 -0
  48. {mapFolding-0.3.8.dist-info → mapFolding-0.3.10.dist-info}/entry_points.txt +0 -0
mapFolding/theDao.py ADDED
@@ -0,0 +1,213 @@
+from mapFolding import indexMy, indexTrack
+from numpy import dtype, integer, ndarray
+from typing import Any, Tuple
+import numba
+import numpy
+
+def activeGapIncrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.gap1ndex.value] += 1
+
+def activeLeafGreaterThan0Condition(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.leaf1ndex.value]
+
+def activeLeafGreaterThanLeavesTotalCondition(foldGroups: ndarray[Tuple[int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.leaf1ndex.value] > foldGroups[-1]
+
+def activeLeafIsTheFirstLeafCondition(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.leaf1ndex.value] <= 1
+
+def allDimensionsAreUnconstrained(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return not my[indexMy.dimensionsUnconstrained.value]
+
+def backtrack(my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    my[indexMy.leaf1ndex.value] -= 1
+    track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]] = track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]]
+    track[indexTrack.leafAbove.value, track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]]] = track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]
+
+def backtrackCondition(my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.leaf1ndex.value] and my[indexMy.gap1ndex.value] == track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value] - 1]
+
+def gap1ndexCeilingIncrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.gap1ndexCeiling.value] += 1
+
+def countGaps(gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    gapsWhere[my[indexMy.gap1ndexCeiling.value]] = my[indexMy.leafConnectee.value]
+    if track[indexTrack.countDimensionsGapped.value, my[indexMy.leafConnectee.value]] == 0:
+        gap1ndexCeilingIncrement(my=my)
+    track[indexTrack.countDimensionsGapped.value, my[indexMy.leafConnectee.value]] += 1
+
+def dimension1ndexIncrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.indexDimension.value] += 1
+
+def dimensionsUnconstrainedCondition(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], my[indexMy.leaf1ndex.value]] == my[indexMy.leaf1ndex.value]
+
+def dimensionsUnconstrainedDecrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.dimensionsUnconstrained.value] -= 1
+
+def filterCommonGaps(gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    gapsWhere[my[indexMy.gap1ndex.value]] = gapsWhere[my[indexMy.indexMiniGap.value]]
+    if track[indexTrack.countDimensionsGapped.value, gapsWhere[my[indexMy.indexMiniGap.value]]] == my[indexMy.dimensionsUnconstrained.value]:
+        activeGapIncrement(my=my)
+    track[indexTrack.countDimensionsGapped.value, gapsWhere[my[indexMy.indexMiniGap.value]]] = 0
+
+def findGapsInitializeVariables(my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    my[indexMy.dimensionsUnconstrained.value] = my[indexMy.dimensionsTotal.value]
+    my[indexMy.gap1ndexCeiling.value] = track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value] - 1]
+    my[indexMy.indexDimension.value] = 0
+
+def indexMiniGapIncrement(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.indexMiniGap.value] += 1
+
+def indexMiniGapInitialization(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.indexMiniGap.value] = my[indexMy.gap1ndex.value]
+
+def insertUnconstrainedLeaf(gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.indexLeaf.value] = 0
+    while my[indexMy.indexLeaf.value] < my[indexMy.leaf1ndex.value]:
+        gapsWhere[my[indexMy.gap1ndexCeiling.value]] = my[indexMy.indexLeaf.value]
+        my[indexMy.gap1ndexCeiling.value] += 1
+        my[indexMy.indexLeaf.value] += 1
+
+def leafBelowSentinelIs1Condition(track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> Any:
+    return track[indexTrack.leafBelow.value, 0] == 1
+
+def leafConnecteeInitialization(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]]) -> None:
+    my[indexMy.leafConnectee.value] = connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], my[indexMy.leaf1ndex.value]]
+
+def leafConnecteeUpdate(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    my[indexMy.leafConnectee.value] = connectionGraph[my[indexMy.indexDimension.value], my[indexMy.leaf1ndex.value], track[indexTrack.leafBelow.value, my[indexMy.leafConnectee.value]]]
+
+def loopingLeavesConnectedToActiveLeaf(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.leafConnectee.value] != my[indexMy.leaf1ndex.value]
+
+def loopingTheDimensions(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.indexDimension.value] < my[indexMy.dimensionsTotal.value]
+
+def loopingToActiveGapCeiling(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.indexMiniGap.value] < my[indexMy.gap1ndexCeiling.value]
+
+def placeLeaf(gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    my[indexMy.gap1ndex.value] -= 1
+    track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]] = gapsWhere[my[indexMy.gap1ndex.value]]
+    track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]] = track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]]
+    track[indexTrack.leafBelow.value, track[indexTrack.leafAbove.value, my[indexMy.leaf1ndex.value]]] = my[indexMy.leaf1ndex.value]
+    track[indexTrack.leafAbove.value, track[indexTrack.leafBelow.value, my[indexMy.leaf1ndex.value]]] = my[indexMy.leaf1ndex.value]
+    track[indexTrack.gapRangeStart.value, my[indexMy.leaf1ndex.value]] = my[indexMy.gap1ndex.value]
+    my[indexMy.leaf1ndex.value] += 1
+
+def placeLeafCondition(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.leaf1ndex.value]
+
+def thereAreComputationDivisionsYouMightSkip(my: ndarray[Tuple[int], dtype[integer[Any]]]) -> Any:
+    return my[indexMy.leaf1ndex.value] != my[indexMy.taskDivisions.value] or my[indexMy.leafConnectee.value] % my[indexMy.taskDivisions.value] == my[indexMy.taskIndex.value]
+
+def countInitialize(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]]
+                    , gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , my: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    while activeLeafGreaterThan0Condition(my=my):
+        if activeLeafIsTheFirstLeafCondition(my=my) or leafBelowSentinelIs1Condition(track=track):
+            findGapsInitializeVariables(my=my, track=track)
+            while loopingTheDimensions(my=my):
+                if dimensionsUnconstrainedCondition(connectionGraph=connectionGraph, my=my):
+                    dimensionsUnconstrainedDecrement(my=my)
+                else:
+                    leafConnecteeInitialization(connectionGraph=connectionGraph, my=my)
+                    while loopingLeavesConnectedToActiveLeaf(my=my):
+                        countGaps(gapsWhere=gapsWhere, my=my, track=track)
+                        leafConnecteeUpdate(connectionGraph=connectionGraph, my=my, track=track)
+                dimension1ndexIncrement(my=my)
+            if allDimensionsAreUnconstrained(my=my):
+                insertUnconstrainedLeaf(gapsWhere=gapsWhere, my=my)
+            indexMiniGapInitialization(my=my)
+            while loopingToActiveGapCeiling(my=my):
+                filterCommonGaps(gapsWhere=gapsWhere, my=my, track=track)
+                indexMiniGapIncrement(my=my)
+        if placeLeafCondition(my=my):
+            placeLeaf(gapsWhere=gapsWhere, my=my, track=track)
+        if my[indexMy.gap1ndex.value] > 0:
+            return
+
+def countParallel(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]]
+                    , foldGroups: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , my: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    gapsWherePARALLEL = gapsWhere.copy()
+    myPARALLEL = my.copy()
+    trackPARALLEL = track.copy()
+    taskDivisionsPrange = myPARALLEL[indexMy.taskDivisions.value]
+    for indexSherpa in numba.prange(taskDivisionsPrange):
+        groupsOfFolds: int = 0
+        gapsWhere = gapsWherePARALLEL.copy()
+        my = myPARALLEL.copy()
+        my[indexMy.taskIndex.value] = indexSherpa
+        track = trackPARALLEL.copy()
+        while activeLeafGreaterThan0Condition(my=my):
+            if activeLeafIsTheFirstLeafCondition(my=my) or leafBelowSentinelIs1Condition(track=track):
+                if activeLeafGreaterThanLeavesTotalCondition(foldGroups=foldGroups, my=my):
+                    groupsOfFolds += 1
+                else:
+                    findGapsInitializeVariables(my=my, track=track)
+                    while loopingTheDimensions(my=my):
+                        if dimensionsUnconstrainedCondition(connectionGraph=connectionGraph, my=my):
+                            dimensionsUnconstrainedDecrement(my=my)
+                        else:
+                            leafConnecteeInitialization(connectionGraph=connectionGraph, my=my)
+                            while loopingLeavesConnectedToActiveLeaf(my=my):
+                                if thereAreComputationDivisionsYouMightSkip(my=my):
+                                    countGaps(gapsWhere=gapsWhere, my=my, track=track)
+                                leafConnecteeUpdate(connectionGraph=connectionGraph, my=my, track=track)
+                        dimension1ndexIncrement(my=my)
+                    indexMiniGapInitialization(my=my)
+                    while loopingToActiveGapCeiling(my=my):
+                        filterCommonGaps(gapsWhere=gapsWhere, my=my, track=track)
+                        indexMiniGapIncrement(my=my)
+            while backtrackCondition(my=my, track=track):
+                backtrack(my=my, track=track)
+            if placeLeafCondition(my=my):
+                placeLeaf(gapsWhere=gapsWhere, my=my, track=track)
+        foldGroups[my[indexMy.taskIndex.value]] = groupsOfFolds
+
+def countSequential(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]], foldGroups: ndarray[Tuple[int], dtype[integer[Any]]], gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]], my: ndarray[Tuple[int], dtype[integer[Any]]], track: ndarray[Tuple[int, int], dtype[integer[Any]]]) -> None:
+    groupsOfFolds: int = 0
+    doFindGaps = True
+    while activeLeafGreaterThan0Condition(my=my):
+        if ((doFindGaps := activeLeafIsTheFirstLeafCondition(my=my) or leafBelowSentinelIs1Condition(track=track))
+                and activeLeafGreaterThanLeavesTotalCondition(foldGroups=foldGroups, my=my)):
+            groupsOfFolds += 1
+        elif doFindGaps:
+            findGapsInitializeVariables(my=my, track=track)
+            while loopingTheDimensions(my=my):
+                if dimensionsUnconstrainedCondition(connectionGraph=connectionGraph, my=my):
+                    dimensionsUnconstrainedDecrement(my=my)
+                else:
+                    leafConnecteeInitialization(connectionGraph=connectionGraph, my=my)
+                    while loopingLeavesConnectedToActiveLeaf(my=my):
+                        countGaps(gapsWhere=gapsWhere, my=my, track=track)
+                        leafConnecteeUpdate(connectionGraph=connectionGraph, my=my, track=track)
+                dimension1ndexIncrement(my=my)
+            indexMiniGapInitialization(my=my)
+            while loopingToActiveGapCeiling(my=my):
+                filterCommonGaps(gapsWhere=gapsWhere, my=my, track=track)
+                indexMiniGapIncrement(my=my)
+        while backtrackCondition(my=my, track=track):
+            backtrack(my=my, track=track)
+        if placeLeafCondition(my=my):
+            placeLeaf(gapsWhere=gapsWhere, my=my, track=track)
+    foldGroups[my[indexMy.taskIndex.value]] = groupsOfFolds
+
+def doTheNeedful(connectionGraph: ndarray[Tuple[int, int, int], dtype[integer[Any]]]
+                    , foldGroups: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , gapsWhere: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , mapShape: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , my: ndarray[Tuple[int], dtype[integer[Any]]]
+                    , track: ndarray[Tuple[int, int], dtype[integer[Any]]]
+                    ) -> None:
+    countInitialize(connectionGraph, gapsWhere, my, track)
+
+    if my[indexMy.taskDivisions.value] > 0:
+        countParallel(connectionGraph, foldGroups, gapsWhere, my, track)
+    else:
+        countSequential(connectionGraph, foldGroups, gapsWhere, my, track)
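Note on the data layout in theDao.py: every scalar of loop state lives in one slot of the flat `my` array and every per-leaf record lives in one row of the `track` array, addressed through the `indexMy` and `indexTrack` enums defined in theSSOT.py below. A minimal sketch of that access pattern follows; the dtypes and array sizes here are illustrative only, since the package builds the real arrays elsewhere (beDRY.py) from the datatype machinery in theSSOT.py.

```python
import numpy
from mapFolding import indexMy, indexTrack  # the enums defined in theSSOT.py below

leavesTotal = 6  # illustrative size only
my = numpy.zeros(len(indexMy), dtype=numpy.uint8)                           # one scalar slot per indexMy member
track = numpy.zeros((len(indexTrack), leavesTotal + 1), dtype=numpy.uint8)  # one row per indexTrack member

my[indexMy.leaf1ndex.value] = 1           # the same indexing pattern used throughout theDao.py
track[indexTrack.leafBelow.value, 0] = 1  # the sentinel tested by leafBelowSentinelIs1Condition
```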
mapFolding/theSSOT.py ADDED
@@ -0,0 +1,251 @@
+from collections import defaultdict
+from mapFolding.theSSOTnumba import *
+from numpy import integer
+from types import ModuleType
+from typing import Any, Callable, Dict, Final, Optional, Tuple, Type, TYPE_CHECKING, cast
+import enum
+import numba
+import numpy
+import numpy.typing
+import pathlib
+import sys
+
+try:
+    from typing import NotRequired
+except ImportError:
+    from typing_extensions import NotRequired
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+else:
+    TypedDict = dict
+
+"""I have hobbled together:
+TypedDict, Enum, defaultdict, and lookup dictionaries to make DIY immutability and delayed realization/instantiation.
+Nevertheless, I am both confident that all of these processes will be replaced and completely ignorant of what will replace them."""
+
+"""Technical concepts I am likely using and likely want to use more effectively:
+- Configuration Registry
+- Write-Once, Read-Many (WORM) / Immutable Initialization
+- Lazy Initialization
+- Separation of Concerns: in the sense that configuration is separated from business logic
+
+Furthermore, I want to more clearly divorce the concept of a single _source_ of (a) truth from
+the _authority_ of that truth. The analogy to a registry of ownership is still apt: the registry
+is, at most, a single (or centralized) source of truth, but it is merely the place to register/record
+the truth determined by some other authority.
+
+And, I almost certainly want to change the semiotics from "authority" (of truth) to "power" (to create a truth).
+Here, "power" is a direct analogy to https://hunterthinks.com/opinion/a-hohfeldian-primer.
+"""
+
+myPackageNameIs = "mapFolding"
+
+moduleOfSyntheticModules = "syntheticModules"
+
+def getPathPackage() -> pathlib.Path:
+    import importlib, inspect
+    pathPackage = pathlib.Path(inspect.getfile(importlib.import_module(myPackageNameIs)))
+    if pathPackage.is_file():
+        pathPackage = pathPackage.parent
+    return pathPackage
+
+def getPathJobRootDEFAULT() -> pathlib.Path:
+    if 'google.colab' in sys.modules:
+        pathJobDEFAULT = pathlib.Path("/content/drive/MyDrive") / "jobs"
+    else:
+        pathJobDEFAULT = getPathPackage() / "jobs"
+    return pathJobDEFAULT
+
+def getPathSyntheticModules() -> pathlib.Path:
+    pathSyntheticModules = getPathPackage() / moduleOfSyntheticModules
+    return pathSyntheticModules
+
+def getAlgorithmSource() -> ModuleType:
+    from mapFolding import theDao
+    return theDao
+
+def getAlgorithmCallable() -> Callable[..., None]:
+    algorithmSource = getAlgorithmSource()
+    return cast(Callable[..., None], algorithmSource.doTheNeedful)
+
+def getDispatcherCallable() -> Callable[..., None]:
+    from mapFolding.syntheticModules import numba_doTheNeedful
+    return cast(Callable[..., None], numba_doTheNeedful.doTheNeedful)
+
+# NOTE I want this _concept_ to be well implemented and usable everywhere: Python, Numba, Jax, CUDA, idc
+class computationState(TypedDict):
+    connectionGraph: numpy.ndarray[Tuple[int, int, int], numpy.dtype[integer[Any]]]
+    foldGroups: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]]
+    gapsWhere: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]]
+    mapShape: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]]
+    my: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]]
+    track: numpy.ndarray[Tuple[int, int], numpy.dtype[integer[Any]]]
+
+@enum.verify(enum.CONTINUOUS, enum.UNIQUE) if sys.version_info >= (3, 11) else lambda x: x
+class EnumIndices(enum.IntEnum):
+    """Base class for index enums."""
+    @staticmethod
+    def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> int:
+        """0-indexed."""
+        return count
+
+    def __index__(self) -> int:
+        """Adapt enum to the ultra-rare event of indexing a NumPy 'ndarray', which is not the
+        same as `array.array`. See NumPy.org; I think it will be very popular someday."""
+        return self.value
+
+class indexMy(EnumIndices):
+    """Indices for scalar values."""
+    dimensionsTotal = enum.auto()
+    dimensionsUnconstrained = enum.auto()
+    gap1ndex = enum.auto()
+    gap1ndexCeiling = enum.auto()
+    indexDimension = enum.auto()
+    indexLeaf = enum.auto()
+    indexMiniGap = enum.auto()
+    leaf1ndex = enum.auto()
+    leafConnectee = enum.auto()
+    taskDivisions = enum.auto()
+    taskIndex = enum.auto()
+
+class indexTrack(EnumIndices):
+    """Indices for state tracking array."""
+    leafAbove = enum.auto()
+    leafBelow = enum.auto()
+    countDimensionsGapped = enum.auto()
+    gapRangeStart = enum.auto()
+
+"delay realization/instantiation until a concrete value is desired"
+"moment of truth: when the value is needed, not when the value is defined"
+
+"""What is a (not too complicated, integer) datatype?
+- ecosystem/module
+- must apathy|value|list of values
+- mustn't apathy|value|list of values
+- bit width
+- bits maximum apathy|value
+- bits minimum apathy|value
+- magnitude maximum apathy|value
+- ?magnitude minimum apathy|value
+- signedness apathy|non-negative|non-positive|both
+"""
+
+_datatypeDefault: Final[Dict[str, str]] = {
+    'elephino': 'uint8',
+    'foldsTotal': 'int64',
+    'leavesTotal': 'uint8',
+}
+_datatypeModule = ''
+_datatypeModuleDEFAULT: Final[str] = 'numpy'
+
+_datatype: Dict[str, str] = defaultdict(str)
+
+def reportDatatypeLimit(identifier: str, datatype: str, sourGrapes: Optional[bool] = False) -> str:
+    global _datatype
+    if not _datatype[identifier]:
+        _datatype[identifier] = datatype
+    elif _datatype[identifier] == datatype:
+        pass
+    elif sourGrapes:
+        raise Exception(f"Datatype is '{_datatype[identifier]}' not '{datatype}', so you can take your ball and go home.")
+    return _datatype[identifier]
+
+def setDatatypeModule(datatypeModule: str, sourGrapes: Optional[bool] = False) -> str:
+    global _datatypeModule
+    if not _datatypeModule:
+        _datatypeModule = datatypeModule
+    elif _datatypeModule == datatypeModule:
+        pass
+    elif sourGrapes:
+        raise Exception(f"Datatype module is '{_datatypeModule}' not '{datatypeModule}', so you can take your ball and go home.")
+    return _datatypeModule
+
+def setDatatypeElephino(datatype: str, sourGrapes: Optional[bool] = False) -> str:
+    return reportDatatypeLimit('elephino', datatype, sourGrapes)
+
+def setDatatypeFoldsTotal(datatype: str, sourGrapes: Optional[bool] = False) -> str:
+    return reportDatatypeLimit('foldsTotal', datatype, sourGrapes)
+
+def setDatatypeLeavesTotal(datatype: str, sourGrapes: Optional[bool] = False) -> str:
+    return reportDatatypeLimit('leavesTotal', datatype, sourGrapes)
+
+def _get_datatype(identifier: str) -> str:
+    global _datatype
+    if not _datatype[identifier]:
+        if identifier in indexMy._member_names_:
+            _datatype[identifier] = _datatypeDefault.get(identifier) or _get_datatype('elephino')
+        elif identifier in indexTrack._member_names_:
+            _datatype[identifier] = _datatypeDefault.get(identifier) or _get_datatype('elephino')
+        else:
+            _datatype[identifier] = _datatypeDefault.get(identifier) or _get_datatype('foldsTotal')
+    return _datatype[identifier]
+
+def _getDatatypeModule() -> str:
+    global _datatypeModule
+    if not _datatypeModule:
+        _datatypeModule = _datatypeModuleDEFAULT
+    return _datatypeModule
+
+def setInStone(identifier: str) -> Type[Any]:
+    datatypeModule = _getDatatypeModule()
+    datatypeStr = _get_datatype(identifier)
+    return cast(Type[Any], getattr(eval(datatypeModule), datatypeStr))
+
+def hackSSOTdtype(identifier: str) -> Type[Any]:
+    _hackSSOTdtype={
+        'connectionGraph': 'dtypeLeavesTotal',
+        'dtypeElephino': 'dtypeElephino',
+        'dtypeFoldsTotal': 'dtypeFoldsTotal',
+        'dtypeLeavesTotal': 'dtypeLeavesTotal',
+        'foldGroups': 'dtypeFoldsTotal',
+        'gapsWhere': 'dtypeLeavesTotal',
+        'mapShape': 'dtypeLeavesTotal',
+        'my': 'dtypeElephino',
+        'track': 'dtypeElephino',
+    }
+    RubeGoldBerg = _hackSSOTdtype[identifier]
+    if RubeGoldBerg == 'dtypeElephino':
+        return setInStone('elephino')
+    elif RubeGoldBerg == 'dtypeFoldsTotal':
+        return setInStone('foldsTotal')
+    elif RubeGoldBerg == 'dtypeLeavesTotal':
+        return setInStone('leavesTotal')
+    raise Exception("Dude, you forgot to set a value in `hackSSOTdtype`.")
+
+def hackSSOTdatatype(identifier: str) -> str:
+    _hackSSOTdatatype={
+        'connectionGraph': 'datatypeLeavesTotal',
+        'countDimensionsGapped': 'datatypeLeavesTotal',
+        'datatypeElephino': 'datatypeElephino',
+        'datatypeFoldsTotal': 'datatypeFoldsTotal',
+        'datatypeLeavesTotal': 'datatypeLeavesTotal',
+        'dimensionsTotal': 'datatypeLeavesTotal',
+        'dimensionsUnconstrained': 'datatypeLeavesTotal',
+        'foldGroups': 'datatypeFoldsTotal',
+        'gap1ndex': 'datatypeLeavesTotal',
+        'gap1ndexCeiling': 'datatypeElephino',
+        'gapRangeStart': 'datatypeElephino',
+        'gapsWhere': 'datatypeLeavesTotal',
+        'groupsOfFolds': 'datatypeFoldsTotal',
+        'indexDimension': 'datatypeLeavesTotal',
+        'indexLeaf': 'datatypeLeavesTotal',
+        'indexMiniGap': 'datatypeElephino',
+        'leaf1ndex': 'datatypeLeavesTotal',
+        'leafAbove': 'datatypeLeavesTotal',
+        'leafBelow': 'datatypeLeavesTotal',
+        'leafConnectee': 'datatypeLeavesTotal',
+        'mapShape': 'datatypeLeavesTotal',
+        'my': 'datatypeElephino',
+        'taskDivisions': 'datatypeLeavesTotal',
+        'taskIndex': 'datatypeLeavesTotal',
+        'track': 'datatypeElephino',
+    }
+    RubeGoldBerg = _hackSSOTdatatype[identifier]
+    if RubeGoldBerg == 'datatypeElephino':
+        return _get_datatype('elephino')
+    elif RubeGoldBerg == 'datatypeFoldsTotal':
+        return _get_datatype('foldsTotal')
+    elif RubeGoldBerg == 'datatypeLeavesTotal':
+        return _get_datatype('leavesTotal')
+    raise Exception("Dude, you forgot to set a value in `hackSSOTdatatype`.")
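The datatype helpers in theSSOT.py act as a write-once registry with lazy resolution: the first call records a value, an identical repeat call is a no-op, and a conflicting call only raises when `sourGrapes=True`; the `hackSSOT*` lookups then resolve an identifier to whatever was registered (or the defaults) at the moment of the call. A minimal usage sketch of that behavior; the 'int64' value here is illustrative (it happens to match the module default for foldsTotal):

```python
from mapFolding.theSSOT import setDatatypeFoldsTotal, hackSSOTdatatype, hackSSOTdtype

setDatatypeFoldsTotal('int64')   # first write is recorded
setDatatypeFoldsTotal('int64')   # same value again: accepted silently
# setDatatypeFoldsTotal('int32', sourGrapes=True)  # would raise, because the slot already holds 'int64'

print(hackSSOTdatatype('groupsOfFolds'))  # 'int64' -- the string name, resolved lazily
print(hackSSOTdtype('foldGroups'))        # <class 'numpy.int64'> -- the NumPy type object
```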
mapFolding/theSSOTnumba.py ADDED
@@ -0,0 +1,115 @@
+"""TODO learn how to use this efficiently and effectively to solve problems, be DRY, and have SSOT."""
+from typing import Final, TYPE_CHECKING, Dict, Any, Union, Callable, Tuple, Any
+import numba
+import numba.core.compiler
+try:
+    from typing import NotRequired
+except ImportError:
+    from typing_extensions import NotRequired
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+else:
+    TypedDict = dict
+
+"""
+Old notes that are not entirely accurate.
+
+| **Option**              | **Description**                                | **Why**               | **Size**        | **But**                  |
+| ----------------------- | ---------------------------------------------- | --------------------- | --------------- | ------------------------ |
+| `_dbg_extend_lifetimes` | Debug option to extend object lifetimes        | Debugging             |                 |                          |
+| `_dbg_optnone`          | Disable optimization for debugging             | Debugging             |                 |                          |
+| `debug`                 | Enable debug mode with additional checks       | Debugging             |                 |                          |
+| `no_rewrites`           | Disable AST rewrites optimization              | Debugging             |                 |                          |
+| `boundscheck`           | Enable array bounds checking (slows execution) | Error checking        | Larger          | Slower                   |
+| `error_model`           | Divide by zero: kill or chill?                 | Error checking        | ?               |                          |
+| `_nrt`                  | Enable No Runtime type checking                | Startup speed         | Smaller         | No type protection       |
+| `fastmath`              | Reduce float potential precision               | Float speed           | Smaller         | Discriminatory, untested |
+| `forceinline`           | Force function inlining                        | Reduce function calls | Likely larger   |                          |
+| `forceobj`              | Force object mode compilation                  | Inclusiveness         | Larger          | Slower execution         |
+| `inline`                | Algorithmically choose inlining                | Speed                 | Slightly larger |                          |
+| `looplift`              | Enable loop lifting optimization               | Speed (if applicable) | Larger          | Exclusionary             |
+| `no_cfunc_wrapper`      | Disable C function wrapper generation          | Size                  | Smaller         | Exclusionary             |
+| `no_cpython_wrapper`    | Disable Python C-API wrapper generation        | Size                  | Smallest        | Exclusionary             |
+
+"""
+
+# TODO try to implement all possible parameters, but use `NotRequired` for the more esoteric ones
+class ParametersNumba(TypedDict):
+    _dbg_extend_lifetimes: NotRequired[bool]
+    _dbg_optnone: NotRequired[bool]
+    _nrt: NotRequired[bool]
+    boundscheck: NotRequired[bool]
+    cache: bool
+    debug: NotRequired[bool]
+    error_model: str
+    fastmath: bool
+    forceinline: bool
+    forceobj: NotRequired[bool]
+    inline: str
+    locals: NotRequired[Dict[str, Any]]
+    looplift: bool
+    no_cfunc_wrapper: bool
+    no_cpython_wrapper: bool
+    no_rewrites: NotRequired[bool]
+    nogil: NotRequired[bool]
+    nopython: bool
+    parallel: bool
+    pipeline_class: NotRequired[numba.core.compiler.CompilerBase]
+    signature_or_function: NotRequired[Union[Any, Callable, str, Tuple]]
+    target: NotRequired[str]
+
+parametersNumbaFailEarly: Final[ParametersNumba] = {
+    '_nrt': True,
+    'boundscheck': True,
+    'cache': True,
+    'error_model': 'python',
+    'fastmath': False,
+    'forceinline': True,
+    'inline': 'always',
+    'looplift': False,
+    'no_cfunc_wrapper': False,
+    'no_cpython_wrapper': False,
+    'nopython': True,
+    'parallel': False,
+}
+"""For a production function: speed is irrelevant, error discovery is paramount, must be compatible with anything downstream."""
+
+parametersNumbaDEFAULT: Final[ParametersNumba] = {
+    '_nrt': True,
+    'boundscheck': False,
+    'cache': True,
+    'error_model': 'numpy',
+    'fastmath': True,
+    'forceinline': True,
+    'inline': 'always',
+    'looplift': False,
+    'no_cfunc_wrapper': False,
+    'no_cpython_wrapper': False,
+    'nopython': True,
+    'parallel': False,
+}
+"""Middle of the road: fast, lean, but will talk to non-jitted functions."""
+
+parametersNumbaParallelDEFAULT: Final[ParametersNumba] = {
+    **parametersNumbaDEFAULT,
+    '_nrt': True,
+    'parallel': True,
+}
+"""Middle of the road: fast, lean, but will talk to non-jitted functions."""
+
+parametersNumbaSuperJit: Final[ParametersNumba] = {
+    **parametersNumbaDEFAULT,
+    'no_cfunc_wrapper': True,
+    'no_cpython_wrapper': True,
+}
+"""Speed, no helmet, no talking to non-jitted functions."""
+
+parametersNumbaSuperJitParallel: Final[ParametersNumba] = {
+    **parametersNumbaSuperJit,
+    '_nrt': True,
+    'parallel': True,
+}
+"""Speed, no helmet, concurrency, no talking to non-jitted functions.
+Claude says, "The NRT is Numba's memory management system that handles memory allocation and deallocation for array operations. Because of array copying, you need to have NRT enabled." IDK which AI assistant autocompleted this, but, "The NRT is a bit slower than the default memory management system, but it's necessary for certain operations."
+"""