mapFolding 0.6.0__py3-none-any.whl → 0.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. mapFolding/__init__.py +6 -104
  2. mapFolding/basecamp.py +12 -8
  3. mapFolding/beDRY.py +103 -286
  4. mapFolding/filesystem.py +95 -0
  5. mapFolding/noHomeYet.py +20 -0
  6. mapFolding/oeis.py +46 -39
  7. mapFolding/reference/flattened.py +377 -0
  8. mapFolding/reference/hunterNumba.py +132 -0
  9. mapFolding/reference/irvineJavaPort.py +120 -0
  10. mapFolding/reference/jax.py +208 -0
  11. mapFolding/reference/lunnan.py +153 -0
  12. mapFolding/reference/lunnanNumpy.py +123 -0
  13. mapFolding/reference/lunnanWhile.py +121 -0
  14. mapFolding/reference/rotatedEntryPoint.py +240 -0
  15. mapFolding/reference/total_countPlus1vsPlusN.py +211 -0
  16. mapFolding/someAssemblyRequired/Z0Z_workbench.py +33 -0
  17. mapFolding/someAssemblyRequired/__init__.py +16 -0
  18. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +21 -0
  19. mapFolding/someAssemblyRequired/ingredientsNumba.py +100 -0
  20. mapFolding/someAssemblyRequired/synthesizeCountingFunctions.py +7 -0
  21. mapFolding/someAssemblyRequired/synthesizeDataConverters.py +135 -0
  22. mapFolding/someAssemblyRequired/synthesizeNumba.py +91 -0
  23. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +417 -0
  24. mapFolding/someAssemblyRequired/synthesizeNumbaModules.py +91 -0
  25. mapFolding/someAssemblyRequired/transformationTools.py +425 -0
  26. mapFolding/someAssemblyRequired/whatWillBe.py +357 -0
  27. mapFolding/syntheticModules/__init__.py +0 -0
  28. mapFolding/syntheticModules/dataNamespaceFlattened.py +30 -0
  29. mapFolding/syntheticModules/multiprocessingCount_doTheNeedful.py +216 -0
  30. mapFolding/syntheticModules/numbaCount.py +90 -0
  31. mapFolding/syntheticModules/numbaCountExample.py +158 -0
  32. mapFolding/syntheticModules/numbaCountSequential.py +111 -0
  33. mapFolding/syntheticModules/numbaCount_doTheNeedful.py +13 -0
  34. mapFolding/syntheticModules/numba_doTheNeedful.py +12 -0
  35. mapFolding/syntheticModules/numba_doTheNeedfulExample.py +13 -0
  36. mapFolding/theDao.py +216 -229
  37. mapFolding/theSSOT.py +269 -101
  38. {mapfolding-0.6.0.dist-info → mapfolding-0.7.1.dist-info}/METADATA +7 -6
  39. mapfolding-0.7.1.dist-info/RECORD +51 -0
  40. {mapfolding-0.6.0.dist-info → mapfolding-0.7.1.dist-info}/WHEEL +1 -1
  41. {mapfolding-0.6.0.dist-info → mapfolding-0.7.1.dist-info}/top_level.txt +1 -0
  42. tests/__init__.py +0 -0
  43. tests/conftest.py +278 -0
  44. tests/test_computations.py +53 -0
  45. tests/test_filesystem.py +52 -0
  46. tests/test_oeis.py +128 -0
  47. tests/test_other.py +84 -0
  48. tests/test_tasks.py +56 -0
  49. mapFolding/theConfiguration.py +0 -58
  50. mapFolding/theSSOTdatatypes.py +0 -155
  51. mapFolding/theWrongWay.py +0 -7
  52. mapfolding-0.6.0.dist-info/RECORD +0 -16
  53. {mapfolding-0.6.0.dist-info → mapfolding-0.7.1.dist-info}/LICENSE +0 -0
  54. {mapfolding-0.6.0.dist-info → mapfolding-0.7.1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,357 @@
1
+ """
2
+ - Settings for synthesizing the modules used by the package (i.e., the flow for numba)
3
+ - Settings for synthesizing modules that could be used by the package (e.g., the flow for JAX)
4
+ - Therefore, an abstracted system for creating settings for the package
5
+ - And with only a little more effort, an abstracted system for creating settings to synthesize arbitrary subsets of modules for arbitrary packages
6
+ """
7
+ from mapFolding.someAssemblyRequired.transformationTools import (
8
+ ast_Identifier,
9
+ executeActionUnlessDescendantMatches,
10
+ extractClassDef,
11
+ extractFunctionDef,
12
+ ifThis,
13
+ Make,
14
+ NodeCollector,
15
+ NodeReplacer,
16
+ strDotStrCuzPyStoopid,
17
+ Then,
18
+ )
19
+ from mapFolding.filesystem import writeStringToHere
20
+ from mapFolding.theSSOT import (
21
+ FREAKOUT,
22
+ getDatatypePackage,
23
+ getSourceAlgorithm,
24
+ theDataclassIdentifier,
25
+ theDataclassInstance,
26
+ theDispatcherCallable,
27
+ theFileExtension,
28
+ theFormatStrModuleForCallableSynthetic,
29
+ theFormatStrModuleSynthetic,
30
+ theLogicalPathModuleDataclass,
31
+ theLogicalPathModuleDispatcherSynthetic,
32
+ theModuleDispatcherSynthetic,
33
+ theModuleOfSyntheticModules,
34
+ thePackageName,
35
+ thePathPackage,
36
+ theSourceInitializeCallable,
37
+ theSourceParallelCallable,
38
+ theSourceSequentialCallable,
39
+ )
40
+ from autoflake import fix_code as autoflake_fix_code
41
+ from collections import defaultdict
42
+ from collections.abc import Sequence
43
+ from inspect import getsource as inspect_getsource
44
+ from mapFolding.someAssemblyRequired.ingredientsNumba import parametersNumbaDEFAULT, parametersNumbaSuperJit, parametersNumbaSuperJitParallel, ParametersNumba
45
+ from pathlib import Path, PurePosixPath
46
+ from types import ModuleType
47
+ from typing import NamedTuple
48
+ from Z0Z_tools import updateExtendPolishDictionaryLists
49
+ import ast
50
+ import dataclasses
51
+
52
@dataclasses.dataclass
class RecipeSynthesizeFlow:
    """Settings for synthesizing flow.

    Bundles every identifier, path, and source artifact needed to transform the
    source algorithm into synthesized (e.g. numba-compiled) modules.
    """
    # TODO consider `IngredientsFlow` or similar
    # ========================================
    # Source
    # NOTE(review): these three defaults are evaluated once, at class-definition
    # time, so every instance shares the same module object and parsed AST —
    # confirm that per-instance isolation is not required.
    sourceAlgorithm: ModuleType = getSourceAlgorithm()
    sourcePython: str = inspect_getsource(sourceAlgorithm)
    source_astModule: ast.Module = ast.parse(sourcePython)
    # https://github.com/hunterhogan/mapFolding/issues/4
    sourceDispatcherCallable: str = theDispatcherCallable
    sourceSequentialCallable: str = theSourceSequentialCallable
    sourceDataclassIdentifier: str = theDataclassIdentifier
    # I still hate the OOP paradigm. But I like this dataclass stuff.

    # ========================================
    # Filesystem
    pathPackage: PurePosixPath = PurePosixPath(thePathPackage)
    fileExtension: str = theFileExtension

    # ========================================
    # Logical identifiers
    # meta
    formatStrModuleSynthetic: str = theFormatStrModuleSynthetic
    formatStrModuleForCallableSynthetic: str = theFormatStrModuleForCallableSynthetic

    # Package
    packageName: ast_Identifier = thePackageName

    # Module
    # https://github.com/hunterhogan/mapFolding/issues/4
    Z0Z_flowLogicalPathRoot: str = theModuleOfSyntheticModules
    moduleDispatcher: str = theModuleDispatcherSynthetic
    logicalPathModuleDataclass: str = theLogicalPathModuleDataclass
    # https://github.com/hunterhogan/mapFolding/issues/4
    # `theLogicalPathModuleDispatcherSynthetic` is a problem. It is defined in theSSOT, but it can also be calculated.
    logicalPathModuleDispatcher: str = theLogicalPathModuleDispatcherSynthetic
    dataConverterModule: str = 'dataNamespaceFlattened'

    # Function
    # NOTE: these defaults refer to the class-level names declared above; they
    # resolve in the class body at definition time, not per instance.
    sequentialCallable: str = sourceSequentialCallable
    dataclassIdentifier: str = sourceDataclassIdentifier
    dataConverterCallable: str = 'unpackDataclassPackUp'
    dispatcherCallable: str = sourceDispatcherCallable

    # Variable
    dataclassInstance: str = theDataclassInstance
99
+
100
+ class LedgerOfImports:
101
+ def __init__(self, startWith: ast.AST | None = None) -> None:
102
+ self.dictionaryImportFrom: dict[str, list[tuple[str, str | None]]] = defaultdict(list)
103
+ self.listImport: list[str] = []
104
+
105
+ if startWith:
106
+ self.walkThis(startWith)
107
+
108
+ def addAst(self, astImport_: ast.Import | ast.ImportFrom) -> None:
109
+ if not isinstance(astImport_, (ast.Import, ast.ImportFrom)): # pyright: ignore[reportUnnecessaryIsInstance]
110
+ raise ValueError(f"Expected ast.Import or ast.ImportFrom, got {type(astImport_)}")
111
+ if isinstance(astImport_, ast.Import):
112
+ for alias in astImport_.names:
113
+ self.listImport.append(alias.name)
114
+ else:
115
+ if astImport_.module is not None:
116
+ for alias in astImport_.names:
117
+ self.dictionaryImportFrom[astImport_.module].append((alias.name, alias.asname))
118
+
119
+ def addImportStr(self, module: str) -> None:
120
+ self.listImport.append(module)
121
+
122
+ def addImportFromStr(self, module: str, name: str, asname: str | None = None) -> None:
123
+ self.dictionaryImportFrom[module].append((name, asname))
124
+
125
+ def exportListModuleNames(self) -> list[str]:
126
+ listModuleNames: list[str] = list(self.dictionaryImportFrom.keys())
127
+ listModuleNames.extend(self.listImport)
128
+ return sorted(set(listModuleNames))
129
+
130
+ def makeListAst(self) -> list[ast.ImportFrom | ast.Import]:
131
+ listAstImportFrom: list[ast.ImportFrom] = []
132
+
133
+ for module, listOfNameTuples in sorted(self.dictionaryImportFrom.items()):
134
+ listOfNameTuples = sorted(list(set(listOfNameTuples)), key=lambda nameTuple: nameTuple[0])
135
+ listAlias: list[ast.alias] = []
136
+ for name, asname in listOfNameTuples:
137
+ listAlias.append(Make.astAlias(name, asname))
138
+ listAstImportFrom.append(Make.astImportFrom(module, listAlias))
139
+
140
+ listAstImport: list[ast.Import] = [Make.astImport(name) for name in sorted(set(self.listImport))]
141
+ return listAstImportFrom + listAstImport
142
+
143
+ def update(self, *fromLedger: 'LedgerOfImports') -> None:
144
+ """
145
+ Update this ledger with imports from one or more other ledgers.
146
+
147
+ Parameters:
148
+ *fromTracker: One or more other `LedgerOfImports` objects from which to merge.
149
+ """
150
+ self.dictionaryImportFrom = updateExtendPolishDictionaryLists(self.dictionaryImportFrom, *(ledger.dictionaryImportFrom for ledger in fromLedger), destroyDuplicates=True, reorderLists=True)
151
+
152
+ for ledger in fromLedger:
153
+ self.listImport.extend(ledger.listImport)
154
+
155
+ def walkThis(self, walkThis: ast.AST) -> None:
156
+ for smurf in ast.walk(walkThis):
157
+ if isinstance(smurf, (ast.Import, ast.ImportFrom)):
158
+ self.addAst(smurf)
159
+
160
@dataclasses.dataclass
class Z0Z_IngredientsDataStructure:
    """Everything necessary to create a data structure should be here."""
    # The class definition to synthesize (e.g. the computation-state dataclass).
    dataclassDef: ast.ClassDef
    # Imports required by `dataclassDef`; a fresh ledger per instance.
    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
165
+
166
@dataclasses.dataclass
class IngredientsFunction:
    """Everything necessary to integrate a function into a module should be here."""
    FunctionDef: ast.FunctionDef # hint `Make.astFunctionDef`
    # Imports required by `FunctionDef`; a fresh ledger per instance.
    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
171
+
172
@dataclasses.dataclass
class IngredientsModule:
    """Everything necessary to create one _logical_ `ast.Module` should be here.
    Extrinsic qualities should be handled externally, such as with `RecipeModule`."""
    # If an `ast.Module` had a logical name that would be reasonable, but Python is firmly opposed
    # to a reasonable namespace, therefore, Hunter, you were silly to add a `name` field to this
    # dataclass for building an `ast.Module`.
    # name: ast_Identifier
    # Hey, genius, note that this is dataclasses.InitVar
    # (it is consumed by `__post_init__` and never stored as a field).
    ingredientsFunction: dataclasses.InitVar[Sequence[IngredientsFunction] | IngredientsFunction | None] = None

    # `body` attribute of `ast.Module`
    # Rendered in this order: imports, prologue, functions, epilogue, launcher.
    imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
    prologue: list[ast.stmt] = dataclasses.field(default_factory=list)
    functions: list[ast.FunctionDef | ast.stmt] = dataclasses.field(default_factory=list)
    epilogue: list[ast.stmt] = dataclasses.field(default_factory=list)
    launcher: list[ast.stmt] = dataclasses.field(default_factory=list)

    # parameter for `ast.Module` constructor
    type_ignores: list[ast.TypeIgnore] = dataclasses.field(default_factory=list)

    def __post_init__(self, ingredientsFunction: Sequence[IngredientsFunction] | IngredientsFunction | None = None) -> None:
        # Accept a single IngredientsFunction or a sequence of them.
        if ingredientsFunction is not None:
            if isinstance(ingredientsFunction, IngredientsFunction):
                self.addIngredientsFunction(ingredientsFunction)
            else:
                self.addIngredientsFunction(*ingredientsFunction)

    def addIngredientsFunction(self, *ingredientsFunction: IngredientsFunction) -> None:
        """Add one or more `IngredientsFunction`. """
        # Collect each function's import ledger and merge them all in one pass.
        listLedgers: list[LedgerOfImports] = []
        for definition in ingredientsFunction:
            self.functions.append(definition.FunctionDef)
            listLedgers.append(definition.imports)
        self.imports.update(*listLedgers)

    def _makeModuleBody(self) -> list[ast.stmt]:
        # Assemble the module body in canonical order.
        body: list[ast.stmt] = []
        body.extend(self.imports.makeListAst())
        body.extend(self.prologue)
        body.extend(self.functions)
        body.extend(self.epilogue)
        body.extend(self.launcher)
        # TODO `launcher`, if it exists, must start with `if __name__ == '__main__':` and be indented
        return body

    def export(self) -> ast.Module:
        """Create a new `ast.Module` from the ingredients."""
        return Make.astModule(self._makeModuleBody(), self.type_ignores)
221
+
222
@dataclasses.dataclass
class RecipeCountingFunction:
    """Settings for synthesizing counting functions."""
    # The function definition plus its imports, ready to drop into a module.
    ingredients: IngredientsFunction
226
+
227
@dataclasses.dataclass
class RecipeDispatchFunction:
    """Settings for synthesizing a dispatcher function.

    NOTE(review): the contract below is stated by the original comments, not
    enforced by visible code — verify against the dispatcher synthesizer.
    """
    # A "dispatcher" must receive a dataclass instance and return a dataclass instance.
    # computationStateComplete: ComputationState = dispatcher(computationStateInitialized)
    # The most critical values in the returned dataclass are foldGroups[0:-1] and leavesTotal
    # self.foldsTotal = DatatypeFoldsTotal(self.foldGroups[0:-1].sum() * self.leavesTotal)
    # the function name is required by IngredientsFunction
    ingredients: IngredientsFunction
    logicalPathModuleDataclass: str = theLogicalPathModuleDataclass
    dataclassIdentifier: str = theDataclassIdentifier
    dataclassInstance: str = theDataclassInstance
    Z0Z_unpackDataclass: bool = True
    countDispatcher: bool = True
    # is this the countDispatcher or what is the information for calling the countDispatcher: import or no? callable identifier? parameters? return type?
    # countDispatcher lives in `theLogicalPathModuleDispatcherSynthetic`
    # countDispatcher is named `theDispatcherCallable`
    # post init
    # addImportFromStr(self, module: str, name: str, asname: str | None = None)
245
+
246
@dataclasses.dataclass
class RecipeModule:
    """How to get one or more logical `ast.Module` on disk as one physical module."""
    # Physical namespace
    filenameStem: str
    fileExtension: str = theFileExtension
    pathPackage: PurePosixPath = PurePosixPath(thePathPackage)

    # Physical and logical namespace
    packageName: ast_Identifier | None = thePackageName
    logicalPathINFIX: ast_Identifier | strDotStrCuzPyStoopid | None = None # module names other than the module itself and the package name

    # FIX: the original declared `ingredients: IngredientsModule = IngredientsModule()`
    # (after the properties below), which shares ONE mutable IngredientsModule across
    # every RecipeModule instance. `default_factory` gives each instance its own.
    # Declared here, it remains the last field, so `__init__`'s signature is unchanged.
    ingredients: IngredientsModule = dataclasses.field(default_factory=IngredientsModule)

    def _getLogicalPathParent(self) -> str | None:
        """Dotted path of the package (+ optional infix), or None if neither is set."""
        listModules: list[ast_Identifier] = []
        if self.packageName:
            listModules.append(self.packageName)
        if self.logicalPathINFIX:
            listModules.append(self.logicalPathINFIX)
        if listModules:
            return '.'.join(listModules)

    def _getLogicalPathAbsolute(self) -> str:
        """Fully qualified dotted module path, including the module's own name."""
        listModules: list[ast_Identifier] = []
        logicalPathParent: str | None = self._getLogicalPathParent()
        if logicalPathParent:
            listModules.append(logicalPathParent)
        listModules.append(self.filenameStem)
        return '.'.join(listModules)

    @property
    def pathFilename(self):
        """ `PurePosixPath` ensures os-independent formatting of the `dataclass.field` value,
        but you must convert to `Path` to perform filesystem operations."""
        pathRoot: PurePosixPath = self.pathPackage
        filename: str = self.filenameStem + self.fileExtension
        if self.logicalPathINFIX:
            # The dotted infix maps directly onto subdirectories on disk.
            whyIsThisStillAThing: list[str] = self.logicalPathINFIX.split('.')
            pathRoot = pathRoot.joinpath(*whyIsThisStillAThing)
        return pathRoot.joinpath(filename)

    @property
    def absoluteImport(self) -> ast.Import:
        """`import package.infix.module` as an AST node."""
        return Make.astImport(self._getLogicalPathAbsolute())

    @property
    def absoluteImportFrom(self) -> ast.ImportFrom:
        """ `from . import theModule` """
        logicalPathParent: str = self._getLogicalPathParent() or '.'
        return Make.astImportFrom(logicalPathParent, [Make.astAlias(self.filenameStem)])

    def writeModule(self) -> None:
        """Unparse `ingredients`, clean the source with autoflake, and write it to `pathFilename`."""
        astModule = self.ingredients.export()
        ast.fix_missing_locations(astModule)
        pythonSource: str = ast.unparse(astModule)
        if not pythonSource: raise FREAKOUT
        # Tell autoflake which imports are intentional so it does not strip them.
        autoflake_additional_imports: list[str] = self.ingredients.imports.exportListModuleNames()
        if self.packageName:
            autoflake_additional_imports.append(self.packageName)
        pythonSource = autoflake_fix_code(pythonSource, autoflake_additional_imports, expand_star_imports=False, remove_all_unused_imports=False, remove_duplicate_keys = False, remove_unused_variables = False,)
        writeStringToHere(pythonSource, self.pathFilename)
308
+
309
# Module-level wiring: build the default numba synthesis flow and the recipes
# derived from it. These run at import time.
numbaFlow: RecipeSynthesizeFlow = RecipeSynthesizeFlow()

# https://github.com/hunterhogan/mapFolding/issues/3
# Locate the sequential counting function inside the source algorithm's AST.
sourceSequentialFunctionDef = extractFunctionDef(numbaFlow.sourceSequentialCallable, numbaFlow.source_astModule)
if sourceSequentialFunctionDef is None: raise FREAKOUT

# Recipe for the sequential counting function, seeded with every import found
# in the source module.
numbaCountSequential = RecipeCountingFunction(IngredientsFunction(
    FunctionDef=sourceSequentialFunctionDef,
    imports=LedgerOfImports(numbaFlow.source_astModule)
))

# Recipe describing where the synthesized dispatcher module lives on disk.
numbaDispatcher = RecipeModule(filenameStem=numbaFlow.moduleDispatcher, fileExtension=numbaFlow.fileExtension, pathPackage=numbaFlow.pathPackage,
    packageName=numbaFlow.packageName, logicalPathINFIX=numbaFlow.Z0Z_flowLogicalPathRoot)
322
+
323
class ParametersSynthesizeNumbaCallable(NamedTuple):
    """Per-callable settings for numba synthesis."""
    # Name of the function to synthesize.
    callableTarget: str
    # Numba jit parameters; None presumably means "use defaults" — confirm at the call site.
    parametersNumba: ParametersNumba | None = None
    # Whether called helpers are inlined into the synthesized body.
    inlineCallables: bool = False
327
+
328
# The three counting callables the dispatcher hands work to, each with its own
# numba jit settings; all are inlined (third field True).
listNumbaCallableDispatchees: list[ParametersSynthesizeNumbaCallable] = [
    ParametersSynthesizeNumbaCallable('countParallel', parametersNumbaSuperJitParallel, True),
    ParametersSynthesizeNumbaCallable('countSequential', parametersNumbaSuperJit, True),
    ParametersSynthesizeNumbaCallable('countInitialize', parametersNumbaDEFAULT, True),
]
333
+
334
# Mutable module-level settings, read/written via the Z0Z_ accessors below.
_datatypeModuleScalar = ''
_decoratorCallable = ''

# if numba
# NOTE(review): the empty-string defaults above are immediately overwritten with
# the numba settings — presumably a placeholder for future per-backend selection.
_datatypeModuleScalar = 'numba'
_decoratorCallable = 'jit'
Z0Z_autoflake_additional_imports: list[str] = []
Z0Z_autoflake_additional_imports.append('numba')
342
+
343
def Z0Z_getDatatypeModuleScalar() -> str:
    """Return the module name providing scalar datatypes (e.g. 'numba')."""
    return _datatypeModuleScalar
345
+
346
def Z0Z_setDatatypeModuleScalar(moduleName: str) -> str:
    """Set and return the module name providing scalar datatypes."""
    global _datatypeModuleScalar
    _datatypeModuleScalar = moduleName
    return _datatypeModuleScalar
350
+
351
def Z0Z_getDecoratorCallable() -> str:
    """Return the decorator name applied to synthesized callables (e.g. 'jit')."""
    return _decoratorCallable
353
+
354
def Z0Z_setDecoratorCallable(decoratorName: str) -> str:
    """Set and return the decorator name applied to synthesized callables."""
    global _decoratorCallable
    _decoratorCallable = decoratorName
    return _decoratorCallable
File without changes
@@ -0,0 +1,30 @@
1
+ from mapFolding.someAssemblyRequired.synthesizeDataConverters import makeStateJob
2
+ from mapFolding.syntheticModules.numbaCount_doTheNeedful import doTheNeedful
3
+ from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeavesTotal, Array3D, ComputationState, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
4
+
5
def flattenData(state: ComputationState) -> ComputationState:
    """Unpack a ComputationState into locals, run `doTheNeedful`, and repack the result.

    The flattening exists so the (synthesized, numba-friendly) `doTheNeedful`
    receives plain positional scalars/arrays instead of a dataclass instance.

    NOTE(review): the incoming `state` is immediately replaced by a freshly
    built state from `makeStateJob(state.mapShape, writeJob=False)` — any
    progress carried by the caller's `state` is discarded. Confirm intended.
    """
    state = makeStateJob(state.mapShape, writeJob=False)
    mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
    leavesTotal: DatatypeLeavesTotal = state.leavesTotal
    taskDivisions: DatatypeLeavesTotal = state.taskDivisions
    connectionGraph: Array3D = state.connectionGraph
    dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
    countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
    dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
    gapRangeStart: Array1DElephino = state.gapRangeStart
    gapsWhere: Array1DLeavesTotal = state.gapsWhere
    leafAbove: Array1DLeavesTotal = state.leafAbove
    leafBelow: Array1DLeavesTotal = state.leafBelow
    foldGroups: Array1DFoldsTotal = state.foldGroups
    foldsTotal: DatatypeFoldsTotal = state.foldsTotal
    gap1ndex: DatatypeLeavesTotal = state.gap1ndex
    gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
    groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
    indexDimension: DatatypeLeavesTotal = state.indexDimension
    indexLeaf: DatatypeLeavesTotal = state.indexLeaf
    indexMiniGap: DatatypeElephino = state.indexMiniGap
    leaf1ndex: DatatypeElephino = state.leaf1ndex
    leafConnectee: DatatypeElephino = state.leafConnectee
    taskIndex: DatatypeLeavesTotal = state.taskIndex
    # Positional order here must match `doTheNeedful`'s signature exactly.
    mapShape, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex = doTheNeedful(mapShape, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
    # NOTE(review): `connectionGraph`, `dimensionsTotal`, and `indexLeaf` are
    # returned by `doTheNeedful` but NOT passed to the reconstructed
    # ComputationState below — presumably derived in its __post_init__;
    # verify against the dataclass definition.
    return ComputationState(mapShape=mapShape, leavesTotal=leavesTotal, taskDivisions=taskDivisions, countDimensionsGapped=countDimensionsGapped, dimensionsUnconstrained=dimensionsUnconstrained, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, foldGroups=foldGroups, foldsTotal=foldsTotal, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, groupsOfFolds=groupsOfFolds, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, taskIndex=taskIndex)
@@ -0,0 +1,216 @@
1
+ from concurrent.futures import ProcessPoolExecutor
2
+ from mapFolding.theSSOT import ComputationState
3
+ import concurrent.futures
4
+ import copy
5
+ import multiprocessing
6
+
7
def activeLeafConnectedToItself(state: ComputationState) -> bool:
    """True when the current connectee is the active leaf itself (a self-loop)."""
    return state.leaf1ndex == state.leafConnectee
9
+
10
def activeLeafGreaterThan0(state: ComputationState) -> bool:
    """True while there is still an active leaf to process."""
    return 0 < state.leaf1ndex
12
+
13
def activeLeafGreaterThanLeavesTotal(state: ComputationState) -> bool:
    """True when every leaf has been placed (the active index passed the last leaf)."""
    return state.leavesTotal < state.leaf1ndex
15
+
16
def activeLeafIsTheFirstLeaf(state: ComputationState) -> bool:
    """True when the active leaf is the first leaf (index 1 or below)."""
    return 1 >= state.leaf1ndex
18
+
19
def allDimensionsAreUnconstrained(state: ComputationState) -> bool:
    """True when no unconstrained dimensions remain (the counter is falsy/zero)."""
    remaining = state.dimensionsUnconstrained
    return not remaining
21
+
22
def backtrack(state: ComputationState) -> ComputationState:
    """Undo the most recent leaf placement: step the active leaf back and
    splice it out of the above/below doubly linked list."""
    state.leaf1ndex -= 1
    # Relink the neighbors of the removed leaf to each other.
    state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leafBelow[state.leaf1ndex]
    state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leafAbove[state.leaf1ndex]
    return state
27
+
28
def countGaps(state: ComputationState) -> ComputationState:
    """Record the current connectee as a potential gap and tally the dimension.

    The ceiling is advanced only the first time a connectee is seen (count 0),
    so each distinct gap location is stored once.
    """
    state.gapsWhere[state.gap1ndexCeiling] = state.leafConnectee
    if state.countDimensionsGapped[state.leafConnectee] == 0:
        state = incrementGap1ndexCeiling(state)
    state.countDimensionsGapped[state.leafConnectee] += 1
    return state
34
+
35
def decrementDimensionsUnconstrained(state: ComputationState) -> ComputationState:
    """Consume one unconstrained dimension and return the mutated state."""
    state.dimensionsUnconstrained = state.dimensionsUnconstrained - 1
    return state
38
+
39
def dimensionsUnconstrainedCondition(state: ComputationState) -> bool:
    """True when, in the current dimension, the active leaf's diagonal entry in
    the connection graph maps to itself — i.e. the dimension does not constrain it."""
    return state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leaf1ndex] == state.leaf1ndex
41
+
42
def filterCommonGaps(state: ComputationState) -> ComputationState:
    """Keep only gaps that were counted in every unconstrained dimension.

    A candidate survives (gap1ndex advances over it) exactly when its
    per-dimension count equals `dimensionsUnconstrained`; the count is then
    reset for the next round.
    """
    state.gapsWhere[state.gap1ndex] = state.gapsWhere[state.indexMiniGap]
    if state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] == state.dimensionsUnconstrained:
        state = incrementActiveGap(state)
    state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] = 0
    return state
48
+
49
def incrementActiveGap(state: ComputationState) -> ComputationState:
    """Advance the active-gap cursor by one and return the mutated state."""
    state.gap1ndex = state.gap1ndex + 1
    return state
52
+
53
def incrementGap1ndexCeiling(state: ComputationState) -> ComputationState:
    """Advance the gap-ceiling cursor by one and return the mutated state."""
    state.gap1ndexCeiling = state.gap1ndexCeiling + 1
    return state
56
+
57
def incrementIndexDimension(state: ComputationState) -> ComputationState:
    """Move to the next dimension and return the mutated state."""
    state.indexDimension = state.indexDimension + 1
    return state
60
+
61
def incrementIndexMiniGap(state: ComputationState) -> ComputationState:
    """Move to the next candidate gap and return the mutated state."""
    state.indexMiniGap = state.indexMiniGap + 1
    return state
64
+
65
def initializeIndexMiniGap(state: ComputationState) -> ComputationState:
    """Start the candidate-gap scan at the current active-gap cursor."""
    activeGapCursor = state.gap1ndex
    state.indexMiniGap = activeGapCursor
    return state
68
+
69
def initializeLeafConnectee(state: ComputationState) -> ComputationState:
    """Point `leafConnectee` at the active leaf's own entry in the connection
    graph for the current dimension (the start of its connection cycle)."""
    indexDimension = state.indexDimension
    activeLeaf = state.leaf1ndex
    state.leafConnectee = state.connectionGraph[indexDimension, activeLeaf, activeLeaf]
    return state
72
+
73
def initializeVariablesToFindGaps(state: ComputationState) -> ComputationState:
    """Reset the per-leaf gap search: all dimensions unconstrained, the gap
    ceiling rewound to the previous leaf's range start, dimension scan at 0."""
    state.dimensionsUnconstrained = state.dimensionsTotal
    state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
    state.indexDimension = 0
    return state
78
+
79
def insertUnconstrainedLeaf(state: ComputationState) -> ComputationState:
    """When no dimension constrains the active leaf, every position below it is
    a valid gap: record each index and grow the ceiling accordingly."""
    for indexLeaf in range(state.leaf1ndex):
        state.gapsWhere[state.gap1ndexCeiling] = indexLeaf
        state.gap1ndexCeiling += 1
    return state
86
+
87
def leafBelowSentinelIs1(state: ComputationState) -> bool:
    """True when the sentinel's below-pointer is leaf 1."""
    return 1 == state.leafBelow[0]
89
+
90
def loopingLeavesConnectedToActiveLeaf(state: ComputationState) -> bool:
    """True while the connection-cycle walk has not returned to the active leaf."""
    return state.leaf1ndex != state.leafConnectee
92
+
93
def loopingToActiveGapCeiling(state: ComputationState) -> bool:
    """True while the candidate-gap scan has not reached the ceiling."""
    return state.gap1ndexCeiling > state.indexMiniGap
95
+
96
def loopUpToDimensionsTotal(state: ComputationState) -> bool:
    """True while the dimension scan has not covered every dimension."""
    return state.dimensionsTotal > state.indexDimension
98
+
99
def noGapsHere(state: ComputationState) -> bool:
    """True when an active leaf exists but gained no gaps beyond the previous
    leaf's range start — the signal to backtrack."""
    activeLeaf = state.leaf1ndex
    return (activeLeaf > 0) and (state.gap1ndex == state.gapRangeStart[activeLeaf - 1])
101
+
102
def placeLeaf(state: ComputationState) -> ComputationState:
    """Place the active leaf into the most recently found gap.

    Pops a gap (decrements gap1ndex), splices the leaf into the above/below
    doubly linked list at that gap, records the gap range for this leaf, and
    advances to the next leaf.
    """
    state.gap1ndex -= 1
    state.leafAbove[state.leaf1ndex] = state.gapsWhere[state.gap1ndex]
    state.leafBelow[state.leaf1ndex] = state.leafBelow[state.leafAbove[state.leaf1ndex]]
    # Point both neighbors at the newly inserted leaf.
    state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leaf1ndex
    state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
    state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
    state.leaf1ndex += 1
    return state
111
+
112
def thereIsAnActiveLeaf(state: ComputationState) -> bool:
    """True when an active leaf exists (index above zero)."""
    return 0 < state.leaf1ndex
114
+
115
def thisIsMyTaskIndex(state: ComputationState) -> bool:
    """Work-partition predicate for parallel counting: before the division
    point (leaf1ndex != taskDivisions) every worker counts; at the division
    point, a worker only counts connectees assigned to its taskIndex by modulo."""
    return (state.leaf1ndex != state.taskDivisions) or (state.leafConnectee % state.taskDivisions == state.taskIndex)
117
+
118
def updateLeafConnectee(state: ComputationState) -> ComputationState:
    """Step to the next leaf in the connection cycle by following the current
    connectee's below-pointer through the connection graph."""
    state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leafBelow[state.leafConnectee]]
    return state
121
+
122
def countInitialize(state: ComputationState) -> ComputationState:
    """Run the folding loop only far enough to produce the first gap, leaving
    `state` positioned for the real counting pass.

    NOTE(review): indentation was reconstructed from the algorithm's structure
    (SOURCE is a diff rendering that lost indentation) — verify against the
    original module before relying on exact nesting.
    """
    while activeLeafGreaterThan0(state):
        if activeLeafIsTheFirstLeaf(state) or leafBelowSentinelIs1(state):
            state = initializeVariablesToFindGaps(state)
            while loopUpToDimensionsTotal(state):
                state = initializeLeafConnectee(state)
                if activeLeafConnectedToItself(state):
                    state = decrementDimensionsUnconstrained(state)
                else:
                    # Walk the connection cycle, tallying candidate gaps.
                    while loopingLeavesConnectedToActiveLeaf(state):
                        state = countGaps(state)
                        state = updateLeafConnectee(state)
                state = incrementIndexDimension(state)
            if allDimensionsAreUnconstrained(state):
                state = insertUnconstrainedLeaf(state)
            # Keep only gaps common to all unconstrained dimensions.
            state = initializeIndexMiniGap(state)
            while loopingToActiveGapCeiling(state):
                state = filterCommonGaps(state)
                state = incrementIndexMiniGap(state)
        if thereIsAnActiveLeaf(state):
            state = placeLeaf(state)
        # Stop as soon as the first gap exists: initialization is complete.
        if state.gap1ndex > 0:
            break
    return state
146
+
147
def countParallel(state: ComputationState) -> ComputationState:
    """Count fold groups for one worker's share of the task-divided search.

    Identical to `countSequential` except that gap tallying is gated by
    `thisIsMyTaskIndex` and the unconstrained test uses the graph diagonal
    directly. The result is written into `foldGroups[taskIndex]`.

    NOTE(review): indentation reconstructed from the algorithm's structure
    (SOURCE is a diff rendering that lost indentation) — verify exact nesting.
    """
    while activeLeafGreaterThan0(state):
        if activeLeafIsTheFirstLeaf(state) or leafBelowSentinelIs1(state):
            if activeLeafGreaterThanLeavesTotal(state):
                # All leaves placed: one complete folding group found.
                state.groupsOfFolds += 1
            else:
                state = initializeVariablesToFindGaps(state)
                while loopUpToDimensionsTotal(state):
                    if dimensionsUnconstrainedCondition(state):
                        state = decrementDimensionsUnconstrained(state)
                    else:
                        state = initializeLeafConnectee(state)
                        while loopingLeavesConnectedToActiveLeaf(state):
                            # Only tally work assigned to this worker.
                            if thisIsMyTaskIndex(state):
                                state = countGaps(state)
                            state = updateLeafConnectee(state)
                    state = incrementIndexDimension(state)
                state = initializeIndexMiniGap(state)
                while loopingToActiveGapCeiling(state):
                    state = filterCommonGaps(state)
                    state = incrementIndexMiniGap(state)
        # Retreat until a leaf with remaining gaps is found.
        while noGapsHere(state):
            state = backtrack(state)
        if thereIsAnActiveLeaf(state):
            state = placeLeaf(state)
    state.foldGroups[state.taskIndex] = state.groupsOfFolds
    return state
174
+
175
def countSequential(state: ComputationState) -> ComputationState:
    """Count all fold groups in a single process.

    Classic backtracking search: find gaps for the active leaf, place it,
    backtrack when a leaf has no gaps, and count a group each time every leaf
    is placed. The total lands in `foldGroups[taskIndex]`.

    NOTE(review): indentation reconstructed from the algorithm's structure
    (SOURCE is a diff rendering that lost indentation) — verify exact nesting.
    """
    while activeLeafGreaterThan0(state):
        if activeLeafIsTheFirstLeaf(state) or leafBelowSentinelIs1(state):
            if activeLeafGreaterThanLeavesTotal(state):
                # All leaves placed: one complete folding group found.
                state.groupsOfFolds += 1
            else:
                state = initializeVariablesToFindGaps(state)
                while loopUpToDimensionsTotal(state):
                    state = initializeLeafConnectee(state)
                    if activeLeafConnectedToItself(state):
                        state = decrementDimensionsUnconstrained(state)
                    else:
                        while loopingLeavesConnectedToActiveLeaf(state):
                            state = countGaps(state)
                            state = updateLeafConnectee(state)
                    state = incrementIndexDimension(state)
                state = initializeIndexMiniGap(state)
                while loopingToActiveGapCeiling(state):
                    state = filterCommonGaps(state)
                    state = incrementIndexMiniGap(state)
        # Retreat until a leaf with remaining gaps is found.
        while noGapsHere(state):
            state = backtrack(state)
        if thereIsAnActiveLeaf(state):
            state = placeLeaf(state)
    state.foldGroups[state.taskIndex] = state.groupsOfFolds
    return state
201
+
202
def doTheNeedful(state: ComputationState) -> ComputationState:
    """Initialize the search, then count either in parallel or sequentially.

    With `taskDivisions > 0`, one deep-copied state per division is submitted
    to a process pool; each worker's `foldGroups[taskIndex]` result is merged
    back into the caller's state. Otherwise counts in this process.
    """
    state = countInitialize(state)
    if state.taskDivisions > 0:
        # NOTE(review): `set_start_method` raises RuntimeError if a start
        # method was already set in this interpreter (e.g. on a second call) —
        # confirm this function is only ever invoked once per process.
        multiprocessing.set_start_method('spawn')
        dictionaryConcurrency: dict[int, concurrent.futures.Future[ComputationState]] = {}
        with ProcessPoolExecutor() as concurrencyManager:
            for indexSherpa in range(state.taskDivisions):
                # Each worker gets an independent copy with its own taskIndex.
                stateParallel = copy.deepcopy(state)
                stateParallel.taskIndex = indexSherpa
                dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, stateParallel)
            for indexSherpa in range(state.taskDivisions):
                # Harvest only this worker's slot from its returned state.
                state.foldGroups[indexSherpa] = dictionaryConcurrency[indexSherpa].result().foldGroups[indexSherpa]
        return state
    else:
        return countSequential(state)