mapFolding 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. mapFolding/__init__.py +6 -104
  2. mapFolding/basecamp.py +12 -8
  3. mapFolding/beDRY.py +96 -286
  4. mapFolding/filesystem.py +87 -0
  5. mapFolding/noHomeYet.py +20 -0
  6. mapFolding/oeis.py +46 -39
  7. mapFolding/reference/flattened.py +377 -0
  8. mapFolding/reference/hunterNumba.py +132 -0
  9. mapFolding/reference/irvineJavaPort.py +120 -0
  10. mapFolding/reference/jax.py +208 -0
  11. mapFolding/reference/lunnan.py +153 -0
  12. mapFolding/reference/lunnanNumpy.py +123 -0
  13. mapFolding/reference/lunnanWhile.py +121 -0
  14. mapFolding/reference/rotatedEntryPoint.py +240 -0
  15. mapFolding/reference/total_countPlus1vsPlusN.py +211 -0
  16. mapFolding/someAssemblyRequired/Z0Z_workbench.py +34 -0
  17. mapFolding/someAssemblyRequired/__init__.py +16 -0
  18. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +21 -0
  19. mapFolding/someAssemblyRequired/ingredientsNumba.py +100 -0
  20. mapFolding/someAssemblyRequired/synthesizeCountingFunctions.py +7 -0
  21. mapFolding/someAssemblyRequired/synthesizeDataConverters.py +135 -0
  22. mapFolding/someAssemblyRequired/synthesizeNumba.py +91 -0
  23. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +417 -0
  24. mapFolding/someAssemblyRequired/synthesizeNumbaModules.py +91 -0
  25. mapFolding/someAssemblyRequired/transformationTools.py +425 -0
  26. mapFolding/someAssemblyRequired/whatWillBe.py +311 -0
  27. mapFolding/syntheticModules/__init__.py +0 -0
  28. mapFolding/syntheticModules/dataNamespaceFlattened.py +30 -0
  29. mapFolding/syntheticModules/numbaCount.py +90 -0
  30. mapFolding/syntheticModules/numbaCountExample.py +158 -0
  31. mapFolding/syntheticModules/numbaCountSequential.py +110 -0
  32. mapFolding/syntheticModules/numbaCount_doTheNeedful.py +13 -0
  33. mapFolding/syntheticModules/numba_doTheNeedful.py +12 -0
  34. mapFolding/syntheticModules/numba_doTheNeedfulExample.py +13 -0
  35. mapFolding/theDao.py +203 -227
  36. mapFolding/theSSOT.py +255 -102
  37. {mapfolding-0.6.0.dist-info → mapfolding-0.7.0.dist-info}/METADATA +7 -6
  38. mapfolding-0.7.0.dist-info/RECORD +50 -0
  39. {mapfolding-0.6.0.dist-info → mapfolding-0.7.0.dist-info}/WHEEL +1 -1
  40. {mapfolding-0.6.0.dist-info → mapfolding-0.7.0.dist-info}/top_level.txt +1 -0
  41. tests/__init__.py +0 -0
  42. tests/conftest.py +278 -0
  43. tests/test_computations.py +49 -0
  44. tests/test_filesystem.py +52 -0
  45. tests/test_oeis.py +128 -0
  46. tests/test_other.py +84 -0
  47. tests/test_tasks.py +50 -0
  48. mapFolding/theConfiguration.py +0 -58
  49. mapFolding/theSSOTdatatypes.py +0 -155
  50. mapFolding/theWrongWay.py +0 -7
  51. mapfolding-0.6.0.dist-info/RECORD +0 -16
  52. {mapfolding-0.6.0.dist-info → mapfolding-0.7.0.dist-info}/LICENSE +0 -0
  53. {mapfolding-0.6.0.dist-info → mapfolding-0.7.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,311 @@
1
+ """
2
+ - Settings for synthesizing the modules used by the package (i.e., the flow for numba)
3
+ - Settings for synthesizing modules that could be used by the package (e.g., the flow for JAX)
4
+ - Therefore, an abstracted system for creating settings for the package
5
+ - And with only a little more effort, an abstracted system for creating settings to synthesize arbitrary subsets of modules for arbitrary packages
6
+ """
7
+ from mapFolding.someAssemblyRequired.transformationTools import *
8
+ from mapFolding.theSSOT import (
9
+ FREAKOUT,
10
+ getDatatypePackage,
11
+ getSourceAlgorithm,
12
+ theDataclassIdentifierAsStr,
13
+ theDataclassInstanceAsStr,
14
+ theDispatcherCallableAsStr,
15
+ theFileExtension,
16
+ theFormatStrModuleForCallableSynthetic,
17
+ theFormatStrModuleSynthetic,
18
+ theLogicalPathModuleDataclass,
19
+ theLogicalPathModuleDispatcherSynthetic,
20
+ theModuleOfSyntheticModules,
21
+ thePackageName,
22
+ thePathPackage,
23
+ Z0Z_sequentialCallableAsStr,
24
+ )
25
+ from autoflake import fix_code as autoflake_fix_code
26
+ from collections import defaultdict
27
+ from collections.abc import Sequence
28
+ from inspect import getsource as inspect_getsource
29
+ from mapFolding.someAssemblyRequired.ingredientsNumba import parametersNumbaDEFAULT, parametersNumbaSuperJit, parametersNumbaSuperJitParallel, ParametersNumba
30
+ from pathlib import Path
31
+ from types import ModuleType
32
+ from typing import NamedTuple
33
+ from Z0Z_tools import updateExtendPolishDictionaryLists
34
+ import ast
35
+ import dataclasses
36
+
37
+ """
38
+ Start with what is: theDao.py
39
+ Create settings that can transform into what I or the user want it to be.
40
+
41
+ The simplest flow with numba is:
42
+ 1. one module
43
+ 2. dispatcher
44
+ - initialize data with makeJob
45
+ - smash dataclass
46
+ - call countSequential
47
+ 3. countSequential
48
+ - jitted, not super-jitted
49
+ - functions inlined (or I'd have to jit them)
50
+ - return groupsOfFolds
51
+ 4. recycle the dataclass with groupsOfFolds
52
+ 5. return the dataclass
53
+ """
54
+
55
@dataclasses.dataclass
class RecipeSynthesizeFlow:
    """Settings for synthesizing flow."""
    # TODO consider `IngredientsFlow` or similar
    # NOTE(review): these defaults are evaluated once, at class-definition time
    # (i.e., at module import), so every instance shares the same module object,
    # source text, and parsed `ast.Module` — confirm that sharing is intended.
    sourceAlgorithm: ModuleType = getSourceAlgorithm()
    sourcePython: str = inspect_getsource(sourceAlgorithm)
    # sourcePython: str = inspect_getsource(self.sourceAlgorithm)
    # "self" is not defined
    # I still hate the OOP paradigm. But I like this dataclass stuff.
    source_astModule: ast.Module = ast.parse(sourcePython)

    # ========================================
    # Filesystem
    pathPackage: Path = thePathPackage
    fileExtension: str = theFileExtension

    # ========================================
    # Logical identifiers
    # meta: format strings used to build synthetic module names
    formatStrModuleSynthetic: str = theFormatStrModuleSynthetic
    formatStrModuleForCallableSynthetic: str = theFormatStrModuleForCallableSynthetic

    # Package
    packageName: ast_Identifier = thePackageName

    # Module
    moduleOfSyntheticModules: str = theModuleOfSyntheticModules
    logicalPathModuleDataclass: str = theLogicalPathModuleDataclass
    logicalPathModuleDispatcher: str = theLogicalPathModuleDispatcherSynthetic
    dataConverterModule: str = 'dataNamespaceFlattened'

    # Function
    dataclassIdentifierAsStr: str = theDataclassIdentifierAsStr
    dispatcherCallableAsStr: str = theDispatcherCallableAsStr
    dataConverterCallableAsStr: str = 'flattenData'
    sequentialCallableAsStr: str = Z0Z_sequentialCallableAsStr

    # Variable
    dataclassInstanceAsStr: str = theDataclassInstanceAsStr
94
+
95
+ class LedgerOfImports:
96
+ def __init__(self, startWith: ast.AST | None = None) -> None:
97
+ self.dictionaryImportFrom: dict[str, list[tuple[str, str | None]]] = defaultdict(list)
98
+ self.listImport: list[str] = []
99
+
100
+ if startWith:
101
+ self.walkThis(startWith)
102
+
103
+ def addAst(self, astImport_: ast.Import | ast.ImportFrom) -> None:
104
+ if not isinstance(astImport_, (ast.Import, ast.ImportFrom)): # pyright: ignore[reportUnnecessaryIsInstance]
105
+ raise ValueError(f"Expected ast.Import or ast.ImportFrom, got {type(astImport_)}")
106
+ if isinstance(astImport_, ast.Import):
107
+ for alias in astImport_.names:
108
+ self.listImport.append(alias.name)
109
+ else:
110
+ if astImport_.module is not None:
111
+ for alias in astImport_.names:
112
+ self.dictionaryImportFrom[astImport_.module].append((alias.name, alias.asname))
113
+
114
+ def addImportStr(self, module: str) -> None:
115
+ self.listImport.append(module)
116
+
117
+ def addImportFromStr(self, module: str, name: str, asname: str | None = None) -> None:
118
+ self.dictionaryImportFrom[module].append((name, asname))
119
+
120
+ def makeListAst(self) -> list[ast.ImportFrom | ast.Import]:
121
+ listAstImportFrom: list[ast.ImportFrom] = []
122
+
123
+ for module, listOfNameTuples in sorted(self.dictionaryImportFrom.items()):
124
+ listOfNameTuples = sorted(list(set(listOfNameTuples)), key=lambda nameTuple: nameTuple[0])
125
+ listAlias: list[ast.alias] = []
126
+ for name, asname in listOfNameTuples:
127
+ listAlias.append(Make.astAlias(name, asname))
128
+ listAstImportFrom.append(Make.astImportFrom(module, listAlias))
129
+
130
+ listAstImport: list[ast.Import] = [Make.astImport(name) for name in sorted(set(self.listImport))]
131
+ return listAstImportFrom + listAstImport
132
+
133
+ def update(self, *fromLedger: 'LedgerOfImports') -> None:
134
+ """
135
+ Update this ledger with imports from one or more other ledgers.
136
+
137
+ Parameters:
138
+ *fromTracker: One or more other `LedgerOfImports` objects from which to merge.
139
+ """
140
+ self.dictionaryImportFrom = updateExtendPolishDictionaryLists(self.dictionaryImportFrom, *(ledger.dictionaryImportFrom for ledger in fromLedger), destroyDuplicates=True, reorderLists=True)
141
+
142
+ for ledger in fromLedger:
143
+ self.listImport.extend(ledger.listImport)
144
+
145
+ def walkThis(self, walkThis: ast.AST) -> None:
146
+ for smurf in ast.walk(walkThis):
147
+ if isinstance(smurf, (ast.Import, ast.ImportFrom)):
148
+ self.addAst(smurf)
149
+
150
@dataclasses.dataclass
class IngredientsFunction:
    """Everything necessary to integrate a function into a module should be here."""
    # The function definition as an AST node.
    FunctionDef: ast.FunctionDef # hint `Make.astFunctionDef`
    # Ledger of the imports this function's body relies on; merged into the
    # destination module's ledger by `IngredientsModule.addIngredientsFunction`.
    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
155
+
156
@dataclasses.dataclass
class IngredientsModule:
    """Everything necessary to create a module, including the package context, should be here.

    Collects imports, statements, and function definitions; `writeModule` renders
    the collection to disk as a synthesized Python module.
    """
    # Module name without extension, e.g. 'numbaCount'.
    name: ast_Identifier
    # Init-only convenience: one or many IngredientsFunction absorbed in __post_init__.
    ingredientsFunction: dataclasses.InitVar[Sequence[IngredientsFunction] | IngredientsFunction | None] = None

    # Merged import ledger for the whole module.
    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
    # Statements emitted before the functions.
    prologue: list[ast.stmt] = dataclasses.field(default_factory=list)
    functions: list[ast.FunctionDef | ast.stmt] = dataclasses.field(default_factory=list)
    # Statements emitted after the functions.
    epilogue: list[ast.stmt] = dataclasses.field(default_factory=list)
    # Statements emitted last (intended as the `if __name__ == '__main__':` section).
    launcher: list[ast.stmt] = dataclasses.field(default_factory=list)

    packageName: ast_Identifier | None = thePackageName
    logicalPathINFIX: ast_Identifier | strDotStrCuzPyStoopid | None = None # module names other than the module itself and the package name
    pathPackage: Path = thePathPackage
    fileExtension: str = theFileExtension
    type_ignores: list[ast.TypeIgnore] = dataclasses.field(default_factory=list)

    def _getLogicalPathParent(self) -> str | None:
        """Dotted path of the package (plus infix) containing this module, or None."""
        listModules: list[ast_Identifier] = []
        if self.packageName:
            listModules.append(self.packageName)
        if self.logicalPathINFIX:
            listModules.append(self.logicalPathINFIX)
        if listModules:
            return '.'.join(listModules)
        return None  # explicit: no package context configured

    def _getLogicalPathAbsolute(self) -> str:
        """Fully qualified dotted path of this module."""
        listModules: list[ast_Identifier] = []
        logicalPathParent: str | None = self._getLogicalPathParent()
        if logicalPathParent:
            listModules.append(logicalPathParent)
        listModules.append(self.name)
        return '.'.join(listModules)

    @property
    def pathFilename(self) -> Path:
        """Filesystem destination of the synthesized module."""
        pathRoot: Path = self.pathPackage
        filename = self.name + self.fileExtension
        if self.logicalPathINFIX:
            # The infix is dotted like a logical path; map dots to directories.
            whyIsThisStillAThing = self.logicalPathINFIX.split('.')
            pathRoot = pathRoot.joinpath(*whyIsThisStillAThing)
        return pathRoot.joinpath(filename)

    @property
    def absoluteImport(self) -> ast.Import:
        """`import package.infix.theModule` as an AST node."""
        return Make.astImport(self._getLogicalPathAbsolute())

    @property
    def absoluteImportFrom(self) -> ast.ImportFrom:
        """ `from . import theModule` """
        logicalPathParent: str | None = self._getLogicalPathParent()
        if logicalPathParent is None:
            logicalPathParent = '.'
        return Make.astImportFrom(logicalPathParent, [Make.astAlias(self.name)])

    def __post_init__(self, ingredientsFunction: Sequence[IngredientsFunction] | IngredientsFunction | None = None) -> None:
        if ingredientsFunction is not None:
            if isinstance(ingredientsFunction, IngredientsFunction):
                self.addIngredientsFunction(ingredientsFunction)
            else:
                self.addIngredientsFunction(*ingredientsFunction)

    def addIngredientsFunction(self, *ingredientsFunction: IngredientsFunction) -> None:
        """Add one or more `IngredientsFunction`. """
        listLedgers: list[LedgerOfImports] = []
        for definition in ingredientsFunction:
            self.functions.append(definition.FunctionDef)
            listLedgers.append(definition.imports)
        self.imports.update(*listLedgers)

    def _makeModuleBody(self) -> list[ast.stmt]:
        """Constructs the body of the module, including prologue, functions, epilogue, and launcher."""
        body: list[ast.stmt] = []
        body.extend(self.imports.makeListAst())
        body.extend(self.prologue)
        body.extend(self.functions)
        body.extend(self.epilogue)
        body.extend(self.launcher)
        # TODO `launcher` must start with `if __name__ == '__main__':` and be indented
        return body

    def writeModule(self) -> None:
        """Writes the module to disk with proper imports and functions.

        This method creates a proper AST module with imports and function definitions,
        fixes missing locations, unpacks the AST to Python code, applies autoflake
        to clean up imports, and writes the resulting code to the appropriate file.

        Raises:
            FREAKOUT: if unparsing produced an empty source string.
        """
        astModule = Make.astModule(body=self._makeModuleBody(), type_ignores=self.type_ignores)
        ast.fix_missing_locations(astModule)
        pythonSource: str = ast.unparse(astModule)
        if not pythonSource:
            raise FREAKOUT
        autoflake_additional_imports: list[str] = []
        if self.packageName:
            autoflake_additional_imports.append(self.packageName)
        # TODO LedgerOfImports method: list of package names. autoflake_additional_imports.extend()
        autoflake_additional_imports.append(getDatatypePackage())
        pythonSource = autoflake_fix_code(pythonSource, autoflake_additional_imports, expand_star_imports=False, remove_all_unused_imports=False, remove_duplicate_keys=False, remove_unused_variables=False)
        # Fix: the destination directory may not exist yet (e.g. a brand-new
        # logicalPathINFIX); write_text alone would raise FileNotFoundError.
        self.pathFilename.parent.mkdir(parents=True, exist_ok=True)
        # Fix: pin the encoding instead of relying on the platform default.
        self.pathFilename.write_text(pythonSource, encoding='utf-8')
256
+
257
@dataclasses.dataclass
class RecipeSynthesizeCountingFunction:
    """Settings for synthesizing counting functions."""
    # The counting function's definition plus the imports its body needs.
    ingredients: IngredientsFunction

# NOTE(review): module-level, import-time construction — building the default
# numba flow settings happens whenever this module is imported.
numbaFlow: RecipeSynthesizeFlow = RecipeSynthesizeFlow()

# https://github.com/hunterhogan/mapFolding/issues/3
# Extract the sequential counting function from the source algorithm's AST.
sequentialFunctionDef = extractFunctionDef(numbaFlow.sequentialCallableAsStr, numbaFlow.source_astModule)
if sequentialFunctionDef is None: raise FREAKOUT

# Recipe for the sequential counting function, seeded with every import found
# in the source module.
numbaCountSequential = RecipeSynthesizeCountingFunction(IngredientsFunction(
    FunctionDef=sequentialFunctionDef,
    imports=LedgerOfImports(numbaFlow.source_astModule)
))
272
+
273
# the data converter and the dispatcher could be in the same module.

# Package names autoflake must not strip from synthesized modules.
Z0Z_autoflake_additional_imports: list[str] = []
Z0Z_autoflake_additional_imports.append(thePackageName)

class ParametersSynthesizeNumbaCallable(NamedTuple):
    """Recipe for one numba-jitted callable to synthesize."""
    # name of the target function to extract and jit
    callableTarget: str
    # numba jit options; None means use the synthesizer's defaults
    parametersNumba: ParametersNumba | None = None
    # whether callables invoked by the target are inlined into its body
    inlineCallables: bool = False

# The three dispatchee callables synthesized for the numba flow.
listNumbaCallableDispatchees: list[ParametersSynthesizeNumbaCallable] = [
    ParametersSynthesizeNumbaCallable('countParallel', parametersNumbaSuperJitParallel, True),
    ParametersSynthesizeNumbaCallable('countSequential', parametersNumbaSuperJit, True),
    ParametersSynthesizeNumbaCallable('countInitialize', parametersNumbaDEFAULT, True),
]
288
+
289
# Mutable module-level configuration: which module supplies scalar datatypes
# and which decorator is applied to synthesized callables.
_datatypeModuleScalar = ''
_decoratorCallable = ''

# Current flow targets numba.
_datatypeModuleScalar = 'numba'
_decoratorCallable = 'jit'
Z0Z_autoflake_additional_imports.append('numba')

def Z0Z_getDatatypeModuleScalar() -> str:
    """Return the module name that supplies scalar datatypes (currently 'numba')."""
    return _datatypeModuleScalar

def Z0Z_setDatatypeModuleScalar(moduleName: str) -> str:
    """Set the scalar-datatype module name and return the new value."""
    global _datatypeModuleScalar
    _datatypeModuleScalar = moduleName
    return moduleName

def Z0Z_getDecoratorCallable() -> str:
    """Return the decorator name applied to synthesized callables (currently 'jit')."""
    return _decoratorCallable

def Z0Z_setDecoratorCallable(decoratorName: str) -> str:
    """Set the decorator name and return the new value."""
    global _decoratorCallable
    _decoratorCallable = decoratorName
    return decoratorName
File without changes
@@ -0,0 +1,30 @@
1
+ from mapFolding.someAssemblyRequired.synthesizeDataConverters import makeStateJob
2
+ from mapFolding.syntheticModules.numbaCount_doTheNeedful import doTheNeedful
3
+ from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeavesTotal, Array3D, ComputationState, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
4
+
5
def flattenData(state: ComputationState) -> ComputationState:
    """Unpack a ComputationState into scalars and arrays, run `doTheNeedful`, and repack.

    NOTE(review): the incoming `state` is immediately replaced by a fresh state
    built from `state.mapShape`; every other field of the argument is discarded
    — confirm that is intended.
    """
    state = makeStateJob(state.mapShape, writeJob=False)
    # Flatten every field to a local so `doTheNeedful` receives plain scalars
    # and arrays instead of the dataclass instance.
    mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
    leavesTotal: DatatypeLeavesTotal = state.leavesTotal
    taskDivisions: DatatypeLeavesTotal = state.taskDivisions
    connectionGraph: Array3D = state.connectionGraph
    dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
    countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
    dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
    gapRangeStart: Array1DElephino = state.gapRangeStart
    gapsWhere: Array1DLeavesTotal = state.gapsWhere
    leafAbove: Array1DLeavesTotal = state.leafAbove
    leafBelow: Array1DLeavesTotal = state.leafBelow
    foldGroups: Array1DFoldsTotal = state.foldGroups
    foldsTotal: DatatypeFoldsTotal = state.foldsTotal
    gap1ndex: DatatypeLeavesTotal = state.gap1ndex
    gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
    groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
    indexDimension: DatatypeLeavesTotal = state.indexDimension
    indexLeaf: DatatypeLeavesTotal = state.indexLeaf
    indexMiniGap: DatatypeElephino = state.indexMiniGap
    leaf1ndex: DatatypeElephino = state.leaf1ndex
    leafConnectee: DatatypeElephino = state.leafConnectee
    taskIndex: DatatypeLeavesTotal = state.taskIndex
    mapShape, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex = doTheNeedful(mapShape, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
    # NOTE(review): `connectionGraph` and `dimensionsTotal` come back from
    # `doTheNeedful` but are NOT forwarded to the new ComputationState below —
    # presumably recomputed by its __post_init__; verify against theSSOT.
    return ComputationState(mapShape=mapShape, leavesTotal=leavesTotal, taskDivisions=taskDivisions, countDimensionsGapped=countDimensionsGapped, dimensionsUnconstrained=dimensionsUnconstrained, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, foldGroups=foldGroups, foldsTotal=foldsTotal, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, groupsOfFolds=groupsOfFolds, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, taskIndex=taskIndex)
@@ -0,0 +1,90 @@
1
+ from import , ,
2
+ from mapFolding.theSSOT import ComputationState, ComputationState, ComputationState
3
+ import copy
4
+
5
# NOTE(review): this function appears to be intermediate, BROKEN synthesizer
# output: the decorator below is not valid Python syntax (`@(...)` applied to a
# tuple with keywords), and each `state = state` is a placeholder where a real
# dataclass-field update was expected — compare countParallel in
# numbaCountExample.py for the intended statements. This module cannot be
# imported or compiled as-is.
@(ComputationState(ComputationState), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=True, no_cpython_wrapper=True, nopython=True, parallel=True)
def countParallel(statePARALLEL: ComputationState) -> ComputationState:
    # One copy accumulates per-task results; each task works on its own copy.
    stateComplete = copy.deepcopy(statePARALLEL)
    for indexSherpa in range(statePARALLEL.taskDivisions):
        state = copy.deepcopy(statePARALLEL)
        state.taskIndex = indexSherpa
        while state.leaf1ndex > 0:
            if state.leaf1ndex <= 1 or state.leafBelow[0] == 1:
                if state.leaf1ndex > state.leavesTotal:
                    state.groupsOfFolds += 1
                else:
                    state = state  # placeholder
                    while state.indexDimension < state.dimensionsTotal:
                        if state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leaf1ndex] == state.leaf1ndex:
                            state = state  # placeholder
                        else:
                            state = state  # placeholder
                            while state.leafConnectee != state.leaf1ndex:
                                if state.leaf1ndex != state.taskDivisions or state.leafConnectee % state.taskDivisions == state.taskIndex:
                                    state = state  # placeholder
                                state = state  # placeholder
                            state = state  # placeholder
                        state = state  # placeholder
                    while state.indexMiniGap < state.gap1ndexCeiling:
                        state = state  # placeholder
                        state = state  # placeholder
            while state.leaf1ndex > 0 and state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]:
                state = state  # placeholder
            if state.leaf1ndex > 0:
                state = state  # placeholder
        stateComplete.foldGroups[state.taskIndex] = state.groupsOfFolds
    return stateComplete
37
+
38
# NOTE(review): BROKEN synthesizer output, same pattern as countParallel above:
# invalid `@(...)` decorator and `state = state` placeholders where real state
# updates belong (compare countSequential in numbaCountExample.py).
@(ComputationState(ComputationState), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=True, no_cpython_wrapper=True, nopython=True, parallel=False)
def countSequential(state: ComputationState) -> ComputationState:
    while state.leaf1ndex > 0:
        if state.leaf1ndex <= 1 or state.leafBelow[0] == 1:
            if state.leaf1ndex > state.leavesTotal:
                state.groupsOfFolds += 1
            else:
                state = state  # placeholder
                while state.indexDimension < state.dimensionsTotal:
                    state = state  # placeholder
                    if state.leafConnectee == state.leaf1ndex:
                        state = state  # placeholder
                    else:
                        while state.leafConnectee != state.leaf1ndex:
                            state = state  # placeholder
                            state = state  # placeholder
                    state = state  # placeholder
                state = state  # placeholder
                while state.indexMiniGap < state.gap1ndexCeiling:
                    state = state  # placeholder
                    state = state  # placeholder
        while state.leaf1ndex > 0 and state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]:
            state = state  # placeholder
        if state.leaf1ndex > 0:
            state = state  # placeholder
    state.foldGroups[state.taskIndex] = state.groupsOfFolds
    return state
65
+
66
# NOTE(review): BROKEN synthesizer output, same pattern as the functions above:
# invalid `@(...)` decorator and `state = state` placeholders (compare
# countInitialize in numbaCountExample.py for the intended statements).
@(ComputationState(ComputationState), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
def countInitialize(state: ComputationState) -> ComputationState:
    while state.leaf1ndex > 0:
        if state.leaf1ndex <= 1 or state.leafBelow[0] == 1:
            state = state  # placeholder
            while state.indexDimension < state.dimensionsTotal:
                state = state  # placeholder
                if state.leafConnectee == state.leaf1ndex:
                    state = state  # placeholder
                else:
                    while state.leafConnectee != state.leaf1ndex:
                        state = state  # placeholder
                        state = state  # placeholder
                state = state  # placeholder
            if not state.dimensionsUnconstrained:
                state = state  # placeholder
            state = state  # placeholder
            while state.indexMiniGap < state.gap1ndexCeiling:
                state = state  # placeholder
                state = state  # placeholder
        if state.leaf1ndex > 0:
            state = state  # placeholder
        if state.gap1ndex > 0:
            break
    return state
@@ -0,0 +1,158 @@
1
+ from mapFolding.theSSOT import indexMy, indexTrack
2
+ from numba import uint16, prange, int64, jit
3
+ from numpy import ndarray, dtype, integer
4
+ from typing import Any
5
+
6
@jit((uint16[:, :, ::1], uint16[::1], uint16[::1], uint16[:, ::1]), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
def countInitialize(connectionGraph: ndarray[tuple[int, int, int], dtype[integer[Any]]], gapsWhere: ndarray[tuple[int], dtype[integer[Any]]], my: ndarray[tuple[int], dtype[integer[Any]]], track: ndarray[tuple[int, int], dtype[integer[Any]]]) -> None:
    """Run the fold-search loop in place until the first gap choice appears.

    Mutates `gapsWhere`, `my`, and `track` in place; `my` holds scalar loop
    state indexed by the `indexMy` enum, `track` holds per-leaf arrays indexed
    by the `indexTrack` enum. Returns as soon as my[gap1ndex] > 0, leaving the
    arrays positioned for a subsequent counting call.
    """
    while my[indexMy.leaf1ndex] > 0:
        if my[indexMy.leaf1ndex] <= 1 or track[indexTrack.leafBelow, 0] == 1:
            # Find the gaps for the current leaf across every dimension.
            my[indexMy.dimensionsUnconstrained] = my[indexMy.dimensionsTotal]
            my[indexMy.gap1ndexCeiling] = track[indexTrack.gapRangeStart, my[indexMy.leaf1ndex] - 1]
            my[indexMy.indexDimension] = 0
            while my[indexMy.indexDimension] < my[indexMy.dimensionsTotal]:
                if connectionGraph[my[indexMy.indexDimension], my[indexMy.leaf1ndex], my[indexMy.leaf1ndex]] == my[indexMy.leaf1ndex]:
                    # Self-connected: this dimension does not constrain the leaf.
                    my[indexMy.dimensionsUnconstrained] -= 1
                else:
                    my[indexMy.leafConnectee] = connectionGraph[my[indexMy.indexDimension], my[indexMy.leaf1ndex], my[indexMy.leaf1ndex]]
                    while my[indexMy.leafConnectee] != my[indexMy.leaf1ndex]:
                        gapsWhere[my[indexMy.gap1ndexCeiling]] = my[indexMy.leafConnectee]
                        if track[indexTrack.countDimensionsGapped, my[indexMy.leafConnectee]] == 0:
                            my[indexMy.gap1ndexCeiling] += 1
                        track[indexTrack.countDimensionsGapped, my[indexMy.leafConnectee]] += 1
                        my[indexMy.leafConnectee] = connectionGraph[my[indexMy.indexDimension], my[indexMy.leaf1ndex], track[indexTrack.leafBelow, my[indexMy.leafConnectee]]]
                my[indexMy.indexDimension] += 1
            if not my[indexMy.dimensionsUnconstrained]:
                # Fully unconstrained leaf: every earlier position is a gap.
                my[indexMy.indexLeaf] = 0
                while my[indexMy.indexLeaf] < my[indexMy.leaf1ndex]:
                    gapsWhere[my[indexMy.gap1ndexCeiling]] = my[indexMy.indexLeaf]
                    my[indexMy.gap1ndexCeiling] += 1
                    my[indexMy.indexLeaf] += 1
            # Keep only gaps open in every constrained dimension.
            my[indexMy.indexMiniGap] = my[indexMy.gap1ndex]
            while my[indexMy.indexMiniGap] < my[indexMy.gap1ndexCeiling]:
                gapsWhere[my[indexMy.gap1ndex]] = gapsWhere[my[indexMy.indexMiniGap]]
                if track[indexTrack.countDimensionsGapped, gapsWhere[my[indexMy.indexMiniGap]]] == my[indexMy.dimensionsUnconstrained]:
                    my[indexMy.gap1ndex] += 1
                track[indexTrack.countDimensionsGapped, gapsWhere[my[indexMy.indexMiniGap]]] = 0
                my[indexMy.indexMiniGap] += 1
        if my[indexMy.leaf1ndex] > 0:
            # Place the current leaf at the chosen gap and advance.
            my[indexMy.gap1ndex] -= 1
            track[indexTrack.leafAbove, my[indexMy.leaf1ndex]] = gapsWhere[my[indexMy.gap1ndex]]
            track[indexTrack.leafBelow, my[indexMy.leaf1ndex]] = track[indexTrack.leafBelow, track[indexTrack.leafAbove, my[indexMy.leaf1ndex]]]
            track[indexTrack.leafBelow, track[indexTrack.leafAbove, my[indexMy.leaf1ndex]]] = my[indexMy.leaf1ndex]
            track[indexTrack.leafAbove, track[indexTrack.leafBelow, my[indexMy.leaf1ndex]]] = my[indexMy.leaf1ndex]
            track[indexTrack.gapRangeStart, my[indexMy.leaf1ndex]] = my[indexMy.gap1ndex]
            my[indexMy.leaf1ndex] += 1
        if my[indexMy.gap1ndex] > 0:
            # Initialization complete: state now has a pending gap choice.
            return
48
+
49
@jit((uint16[:, :, ::1], int64[::1], uint16[::1], uint16[::1], uint16[:, ::1]), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=True, no_cpython_wrapper=True, nopython=True, parallel=True)
def countParallel(connectionGraph: ndarray[tuple[int, int, int], dtype[integer[Any]]], foldGroups: ndarray[tuple[int], dtype[integer[Any]]], gapsWhere: ndarray[tuple[int], dtype[integer[Any]]], my: ndarray[tuple[int], dtype[integer[Any]]], track: ndarray[tuple[int, int], dtype[integer[Any]]]) -> None:
    """Count fold groups across taskDivisions tasks with numba `prange`.

    Each task gets private copies of the mutable arrays; results land in
    foldGroups[taskIndex]. foldGroups[-1] holds leavesTotal (see the comparison
    below) — TODO confirm against the caller's layout.
    """
    # Pristine copies shared by all tasks; each iteration re-copies them.
    gapsWherePARALLEL = gapsWhere.copy()
    myPARALLEL = my.copy()
    trackPARALLEL = track.copy()
    taskDivisionsPrange = myPARALLEL[indexMy.taskDivisions]
    for indexSherpa in prange(taskDivisionsPrange):
        groupsOfFolds: int = 0
        gapsWhere = gapsWherePARALLEL.copy()
        my = myPARALLEL.copy()
        track = trackPARALLEL.copy()
        my[indexMy.taskIndex] = indexSherpa
        while my[indexMy.leaf1ndex] > 0:
            if my[indexMy.leaf1ndex] <= 1 or track[indexTrack.leafBelow, 0] == 1:
                if my[indexMy.leaf1ndex] > foldGroups[-1]:
                    # All leaves placed: one more complete folding found.
                    groupsOfFolds += 1
                else:
                    my[indexMy.dimensionsUnconstrained] = my[indexMy.dimensionsTotal]
                    my[indexMy.gap1ndexCeiling] = track[indexTrack.gapRangeStart, my[indexMy.leaf1ndex] - 1]
                    my[indexMy.indexDimension] = 0
                    while my[indexMy.indexDimension] < my[indexMy.dimensionsTotal]:
                        if connectionGraph[my[indexMy.indexDimension], my[indexMy.leaf1ndex], my[indexMy.leaf1ndex]] == my[indexMy.leaf1ndex]:
                            my[indexMy.dimensionsUnconstrained] -= 1
                        else:
                            my[indexMy.leafConnectee] = connectionGraph[my[indexMy.indexDimension], my[indexMy.leaf1ndex], my[indexMy.leaf1ndex]]
                            while my[indexMy.leafConnectee] != my[indexMy.leaf1ndex]:
                                # Work partitioning: only the owning task records this gap.
                                if my[indexMy.leaf1ndex] != my[indexMy.taskDivisions] or my[indexMy.leafConnectee] % my[indexMy.taskDivisions] == my[indexMy.taskIndex]:
                                    gapsWhere[my[indexMy.gap1ndexCeiling]] = my[indexMy.leafConnectee]
                                    if track[indexTrack.countDimensionsGapped, my[indexMy.leafConnectee]] == 0:
                                        my[indexMy.gap1ndexCeiling] += 1
                                    track[indexTrack.countDimensionsGapped, my[indexMy.leafConnectee]] += 1
                                my[indexMy.leafConnectee] = connectionGraph[my[indexMy.indexDimension], my[indexMy.leaf1ndex], track[indexTrack.leafBelow, my[indexMy.leafConnectee]]]
                        my[indexMy.indexDimension] += 1
                    # Keep only gaps open in every constrained dimension.
                    my[indexMy.indexMiniGap] = my[indexMy.gap1ndex]
                    while my[indexMy.indexMiniGap] < my[indexMy.gap1ndexCeiling]:
                        gapsWhere[my[indexMy.gap1ndex]] = gapsWhere[my[indexMy.indexMiniGap]]
                        if track[indexTrack.countDimensionsGapped, gapsWhere[my[indexMy.indexMiniGap]]] == my[indexMy.dimensionsUnconstrained]:
                            my[indexMy.gap1ndex] += 1
                        track[indexTrack.countDimensionsGapped, gapsWhere[my[indexMy.indexMiniGap]]] = 0
                        my[indexMy.indexMiniGap] += 1
            # Backtrack while the current leaf has no remaining gap choices.
            while my[indexMy.leaf1ndex] > 0 and my[indexMy.gap1ndex] == track[indexTrack.gapRangeStart, my[indexMy.leaf1ndex] - 1]:
                my[indexMy.leaf1ndex] -= 1
                track[indexTrack.leafBelow, track[indexTrack.leafAbove, my[indexMy.leaf1ndex]]] = track[indexTrack.leafBelow, my[indexMy.leaf1ndex]]
                track[indexTrack.leafAbove, track[indexTrack.leafBelow, my[indexMy.leaf1ndex]]] = track[indexTrack.leafAbove, my[indexMy.leaf1ndex]]
            if my[indexMy.leaf1ndex] > 0:
                # Place the current leaf at the chosen gap and advance.
                my[indexMy.gap1ndex] -= 1
                track[indexTrack.leafAbove, my[indexMy.leaf1ndex]] = gapsWhere[my[indexMy.gap1ndex]]
                track[indexTrack.leafBelow, my[indexMy.leaf1ndex]] = track[indexTrack.leafBelow, track[indexTrack.leafAbove, my[indexMy.leaf1ndex]]]
                track[indexTrack.leafBelow, track[indexTrack.leafAbove, my[indexMy.leaf1ndex]]] = my[indexMy.leaf1ndex]
                track[indexTrack.leafAbove, track[indexTrack.leafBelow, my[indexMy.leaf1ndex]]] = my[indexMy.leaf1ndex]
                track[indexTrack.gapRangeStart, my[indexMy.leaf1ndex]] = my[indexMy.gap1ndex]
                my[indexMy.leaf1ndex] += 1
        foldGroups[my[indexMy.taskIndex]] = groupsOfFolds
102
+
103
@jit((uint16[:, :, ::1], int64[::1], uint16[::1], uint16[::1], uint16[:, ::1]), _nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=True, no_cpython_wrapper=True, nopython=True, parallel=False)
def countSequential(connectionGraph: ndarray[tuple[int, int, int], dtype[integer[Any]]], foldGroups: ndarray[tuple[int], dtype[integer[Any]]], gapsWhere: ndarray[tuple[int], dtype[integer[Any]]], my: ndarray[tuple[int], dtype[integer[Any]]], track: ndarray[tuple[int, int], dtype[integer[Any]]]) -> None:
    """Count fold groups single-threaded; the result lands in foldGroups[taskIndex].

    Row extractions below are numpy views, so writes to e.g. `leafBelow`
    persist in `track`. Scalar extractions (leaf1ndex, gap1ndex, ...) are
    copies: the final values are NOT written back to `my` — only foldGroups is
    an output. foldGroups[-1] appears to hold leavesTotal — TODO confirm.
    """
    # Hoist array rows (views into `track`) and scalar loop state into locals.
    leafBelow = track[indexTrack.leafBelow.value]
    gapRangeStart = track[indexTrack.gapRangeStart.value]
    countDimensionsGapped = track[indexTrack.countDimensionsGapped.value]
    leafAbove = track[indexTrack.leafAbove.value]
    leaf1ndex = my[indexMy.leaf1ndex.value]
    dimensionsUnconstrained = my[indexMy.dimensionsUnconstrained.value]
    dimensionsTotal = my[indexMy.dimensionsTotal.value]
    gap1ndexCeiling = my[indexMy.gap1ndexCeiling.value]
    indexDimension = my[indexMy.indexDimension.value]
    leafConnectee = my[indexMy.leafConnectee.value]
    indexMiniGap = my[indexMy.indexMiniGap.value]
    gap1ndex = my[indexMy.gap1ndex.value]
    taskIndex = my[indexMy.taskIndex.value]
    groupsOfFolds: int = 0
    while leaf1ndex > 0:
        if leaf1ndex <= 1 or leafBelow[0] == 1:
            if leaf1ndex > foldGroups[-1]:
                # All leaves placed: one more complete folding found.
                groupsOfFolds += 1
            else:
                dimensionsUnconstrained = dimensionsTotal
                gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
                indexDimension = 0
                while indexDimension < dimensionsTotal:
                    leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
                    if leafConnectee == leaf1ndex:
                        # Self-connected: this dimension does not constrain the leaf.
                        dimensionsUnconstrained -= 1
                    else:
                        while leafConnectee != leaf1ndex:
                            gapsWhere[gap1ndexCeiling] = leafConnectee
                            if countDimensionsGapped[leafConnectee] == 0:
                                gap1ndexCeiling += 1
                            countDimensionsGapped[leafConnectee] += 1
                            leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
                    indexDimension += 1
                # Keep only gaps open in every constrained dimension.
                indexMiniGap = gap1ndex
                while indexMiniGap < gap1ndexCeiling:
                    gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
                    if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
                        gap1ndex += 1
                    countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
                    indexMiniGap += 1
        # Backtrack while the current leaf has no remaining gap choices.
        while leaf1ndex > 0 and gap1ndex == gapRangeStart[leaf1ndex - 1]:
            leaf1ndex -= 1
            leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
            leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
        if leaf1ndex > 0:
            # Place the current leaf at the chosen gap and advance.
            gap1ndex -= 1
            leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
            leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
            leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
            leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
            gapRangeStart[leaf1ndex] = gap1ndex
            leaf1ndex += 1
    foldGroups[taskIndex] = groupsOfFolds