mapFolding 0.15.3__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapFolding/__init__.py +22 -20
- mapFolding/_theSSOT.py +56 -62
- mapFolding/_theTypes.py +66 -4
- mapFolding/algorithms/__init__.py +1 -0
- mapFolding/{daoOfMapFolding.py → algorithms/daoOfMapFolding.py} +1 -2
- mapFolding/algorithms/getBucketsTotal.py +137 -0
- mapFolding/algorithms/matrixMeanders.py +519 -0
- mapFolding/algorithms/oeisIDbyFormula.py +347 -0
- mapFolding/algorithms/zCuzDocStoopidoeisIDbyFormula.py +84 -0
- mapFolding/basecamp.py +151 -14
- mapFolding/dataBaskets.py +74 -0
- mapFolding/oeis.py +43 -56
- mapFolding/reference/A000682facts.py +662 -0
- mapFolding/reference/A005316facts.py +62 -0
- mapFolding/reference/matrixMeandersAnalysis/__init__.py +1 -0
- mapFolding/reference/matrixMeandersAnalysis/evenEven.py +144 -0
- mapFolding/reference/matrixMeandersAnalysis/oddEven.py +54 -0
- mapFolding/{_oeisFormulas/matrixMeanders64.py → reference/meandersDumpingGround/matrixMeanders64retired.py} +37 -29
- mapFolding/someAssemblyRequired/A007822/A007822rawMaterials.py +55 -0
- mapFolding/someAssemblyRequired/A007822/__init__.py +0 -0
- mapFolding/someAssemblyRequired/A007822/makeA007822AsynchronousModules.py +185 -0
- mapFolding/someAssemblyRequired/A007822/makeA007822Modules.py +71 -0
- mapFolding/someAssemblyRequired/RecipeJob.py +2 -2
- mapFolding/someAssemblyRequired/__init__.py +9 -2
- mapFolding/someAssemblyRequired/_toolIfThis.py +4 -3
- mapFolding/someAssemblyRequired/_toolkitContainers.py +8 -8
- mapFolding/someAssemblyRequired/infoBooth.py +27 -30
- mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +1 -1
- mapFolding/someAssemblyRequired/makeJobTheorem2codon.py +5 -2
- mapFolding/someAssemblyRequired/makingModules_count.py +301 -0
- mapFolding/someAssemblyRequired/makingModules_doTheNeedful.py +120 -0
- mapFolding/someAssemblyRequired/mapFolding/__init__.py +0 -0
- mapFolding/someAssemblyRequired/mapFolding/makeMapFoldingModules.py +220 -0
- mapFolding/someAssemblyRequired/toolkitMakeModules.py +152 -0
- mapFolding/someAssemblyRequired/toolkitNumba.py +1 -1
- mapFolding/someAssemblyRequired/transformationTools.py +1 -0
- mapFolding/syntheticModules/A007822/__init__.py +1 -0
- mapFolding/syntheticModules/A007822/asynchronous.py +148 -0
- mapFolding/syntheticModules/A007822/asynchronousAnnex.py +68 -0
- mapFolding/syntheticModules/A007822/asynchronousTheorem2.py +53 -0
- mapFolding/syntheticModules/A007822/asynchronousTrimmed.py +47 -0
- mapFolding/syntheticModules/dataPackingA007822.py +1 -1
- mapFolding/tests/conftest.py +28 -9
- mapFolding/tests/test_computations.py +32 -10
- mapFolding/tests/test_oeis.py +2 -20
- mapFolding/trim_memory.py +62 -0
- mapFolding/zCuzDocStoopid/__init__.py +1 -0
- mapFolding/zCuzDocStoopid/makeDocstrings.py +63 -0
- {mapfolding-0.15.3.dist-info → mapfolding-0.16.0.dist-info}/METADATA +9 -2
- mapfolding-0.16.0.dist-info/RECORD +100 -0
- {mapfolding-0.15.3.dist-info → mapfolding-0.16.0.dist-info}/entry_points.txt +0 -1
- mapFolding/_oeisFormulas/A000136.py +0 -4
- mapFolding/_oeisFormulas/A000560.py +0 -4
- mapFolding/_oeisFormulas/A000682.py +0 -85
- mapFolding/_oeisFormulas/A001010.py +0 -19
- mapFolding/_oeisFormulas/A001011.py +0 -5
- mapFolding/_oeisFormulas/A005315.py +0 -4
- mapFolding/_oeisFormulas/A005316.py +0 -10
- mapFolding/_oeisFormulas/A223094.py +0 -7
- mapFolding/_oeisFormulas/A259702.py +0 -4
- mapFolding/_oeisFormulas/A301620.py +0 -6
- mapFolding/_oeisFormulas/Z0Z_aOFn.py +0 -34
- mapFolding/_oeisFormulas/Z0Z_notes.py +0 -16
- mapFolding/_oeisFormulas/Z0Z_oeisMeanders.py +0 -74
- mapFolding/_oeisFormulas/Z0Z_symmetry.py +0 -131
- mapFolding/_oeisFormulas/__init__.py +0 -1
- mapFolding/_oeisFormulas/matrixMeanders.py +0 -134
- mapFolding/_oeisFormulas/matrixMeandersAnnex.py +0 -84
- mapFolding/someAssemblyRequired/A007822rawMaterials.py +0 -46
- mapFolding/someAssemblyRequired/makeAllModules.py +0 -764
- mapfolding-0.15.3.dist-info/RECORD +0 -92
- /mapFolding/reference/{A005316JavaPort.py → meandersDumpingGround/A005316JavaPort.py} +0 -0
- /mapFolding/reference/{A005316imperative.py → meandersDumpingGround/A005316imperative.py} +0 -0
- /mapFolding/reference/{A005316intOptimized.py → meandersDumpingGround/A005316intOptimized.py} +0 -0
- /mapFolding/reference/{A005316optimized128bit.py → meandersDumpingGround/A005316optimized128bit.py} +0 -0
- /mapFolding/reference/{A005316primitiveOptimized.py → meandersDumpingGround/A005316primitiveOptimized.py} +0 -0
- /mapFolding/reference/{A005316redis.py → meandersDumpingGround/A005316redis.py} +0 -0
- /mapFolding/reference/{A005316write2disk.py → meandersDumpingGround/A005316write2disk.py} +0 -0
- /mapFolding/reference/{matrixMeandersBaseline.py → meandersDumpingGround/matrixMeandersBaseline.py} +0 -0
- /mapFolding/reference/{matrixMeandersBaselineAnnex.py → meandersDumpingGround/matrixMeandersBaselineAnnex.py} +0 -0
- /mapFolding/reference/{matrixMeandersBaselineV2.py → meandersDumpingGround/matrixMeandersBaselineV2.py} +0 -0
- /mapFolding/reference/{matrixMeandersSimpleQueue.py → meandersDumpingGround/matrixMeandersSimpleQueue.py} +0 -0
- /mapFolding/reference/{matrixMeandersSlicePop.py → meandersDumpingGround/matrixMeandersSlicePop.py} +0 -0
- /mapFolding/syntheticModules/{algorithmA007822.py → A007822/algorithm.py} +0 -0
- /mapFolding/syntheticModules/{algorithmA007822Numba.py → A007822/algorithmNumba.py} +0 -0
- /mapFolding/syntheticModules/{initializeStateA007822.py → A007822/initializeState.py} +0 -0
- /mapFolding/syntheticModules/{theorem2A007822.py → A007822/theorem2.py} +0 -0
- /mapFolding/syntheticModules/{theorem2A007822Numba.py → A007822/theorem2Numba.py} +0 -0
- /mapFolding/syntheticModules/{theorem2A007822Trimmed.py → A007822/theorem2Trimmed.py} +0 -0
- {mapfolding-0.15.3.dist-info → mapfolding-0.16.0.dist-info}/WHEEL +0 -0
- {mapfolding-0.15.3.dist-info → mapfolding-0.16.0.dist-info}/licenses/LICENSE +0 -0
- {mapfolding-0.15.3.dist-info → mapfolding-0.16.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
"""makeMapFoldingModules."""
|
|
2
|
+
from astToolkit import (
|
|
3
|
+
astModuleToIngredientsFunction, Be, DOT, extractClassDef, Grab, hasDOTbody, identifierDotAttribute,
|
|
4
|
+
IngredientsFunction, IngredientsModule, LedgerOfImports, Make, NodeChanger, NodeTourist, parseLogicalPath2astModule,
|
|
5
|
+
parsePathFilename2astModule, Then)
|
|
6
|
+
from astToolkit.transformationTools import inlineFunctionDef, removeUnusedParameters, write_astModule
|
|
7
|
+
from hunterMakesPy import importLogicalPath2Identifier, raiseIfNone
|
|
8
|
+
from mapFolding import packageSettings
|
|
9
|
+
from mapFolding.someAssemblyRequired import (
|
|
10
|
+
DeReConstructField2ast, identifierCallableSourceDEFAULT, identifierCallableSourceDispatcherDEFAULT, IfThis,
|
|
11
|
+
logicalPathInfixDEFAULT, ShatteredDataclass)
|
|
12
|
+
from mapFolding.someAssemblyRequired.makingModules_count import (
|
|
13
|
+
makeDaoOfMapFoldingNumba, makeTheorem2, numbaOnTheorem2, trimTheorem2)
|
|
14
|
+
from mapFolding.someAssemblyRequired.makingModules_doTheNeedful import makeInitializeState, makeUnRePackDataclass
|
|
15
|
+
from mapFolding.someAssemblyRequired.toolkitMakeModules import getLogicalPath, getModule, getPathFilename
|
|
16
|
+
from mapFolding.someAssemblyRequired.toolkitNumba import decorateCallableWithNumba, parametersNumbaLight
|
|
17
|
+
from mapFolding.someAssemblyRequired.transformationTools import (
|
|
18
|
+
removeDataclassFromFunction, shatter_dataclassesDOTdataclass, unpackDataclassCallFunctionRepackDataclass)
|
|
19
|
+
from os import PathLike
|
|
20
|
+
from pathlib import PurePath
|
|
21
|
+
from typing import Any, TYPE_CHECKING
|
|
22
|
+
import ast
|
|
23
|
+
import dataclasses
|
|
24
|
+
|
|
25
|
+
if TYPE_CHECKING:
|
|
26
|
+
from collections.abc import Sequence
|
|
27
|
+
|
|
28
|
+
def makeDaoOfMapFoldingParallelNumba(astModule: ast.Module, moduleIdentifier: str, callableIdentifier: str | None = None, logicalPathInfix: PathLike[str] | PurePath | str | None = None, sourceCallableDispatcher: str | None = None) -> PurePath:  # noqa: ARG001
    """Generate parallel implementation with concurrent execution and task division.

    Parameters
    ----------
    astModule : ast.Module
        Source module containing the base algorithm.
    moduleIdentifier : str
        Name for the generated parallel module.
    callableIdentifier : str | None = None
        Name for the core parallel counting function.
    logicalPathInfix : PathLike[str] | PurePath | str | None = None
        Directory path for organizing the generated module.
    sourceCallableDispatcher : str | None = None
        Optional dispatcher function identifier.

    Returns
    -------
    pathFilename : PurePath
        Filesystem path where the parallel module was written.

    Raises
    ------
    ValueError
        If the state dataclass's defining module, or the parallel dataclass's
        `ClassDef`, cannot be located.

    """
    sourceCallableIdentifier = identifierCallableSourceDEFAULT
    if callableIdentifier is None:
        callableIdentifier = sourceCallableIdentifier
    ingredientsFunction = IngredientsFunction(inlineFunctionDef(sourceCallableIdentifier, astModule), LedgerOfImports(astModule))
    ingredientsFunction.astFunctionDef.name = callableIdentifier

    # The annotated parameter of the counting function names the state dataclass.
    dataclassName: ast.expr = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.annotation)).captureLastMatch(ingredientsFunction.astFunctionDef))
    dataclassIdentifier: str = raiseIfNone(NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(dataclassName))

    # Find the module that exports the dataclass by scanning the function's `from ... import ...` ledger.
    dataclassLogicalPathModule = None
    for moduleWithLogicalPath, listNameTuples in ingredientsFunction.imports._dictionaryImportFrom.items():  # noqa: SLF001
        for nameTuple in listNameTuples:
            if nameTuple[0] == dataclassIdentifier:
                dataclassLogicalPathModule = moduleWithLogicalPath
                break
        if dataclassLogicalPathModule:
            break
    if dataclassLogicalPathModule is None:
        # Fixed: was a bare `raise Exception`; raise a diagnosable error consistent with the failure path below.
        message = f"I could not find the module that defines `{dataclassIdentifier = }` in the imports of the source callable."
        raise ValueError(message)
    dataclassInstanceIdentifier: identifierDotAttribute = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(ingredientsFunction.astFunctionDef))
    shatteredDataclass: ShatteredDataclass = shatter_dataclassesDOTdataclass(dataclassLogicalPathModule, dataclassIdentifier, dataclassInstanceIdentifier)

    # START add the parallel state fields to the count function ------------------------------------------------
    # Fields present only on the `Parallel*` dataclass (e.g. task bookkeeping) must be grafted onto the
    # shattered view of the base dataclass so the transformed function receives them as plain parameters.
    dataclassBaseFields: tuple[dataclasses.Field[Any], ...] = dataclasses.fields(importLogicalPath2Identifier(dataclassLogicalPathModule, dataclassIdentifier))  # pyright: ignore [reportArgumentType]
    dataclassIdentifierParallel: identifierDotAttribute = 'Parallel' + dataclassIdentifier
    dataclassFieldsParallel: tuple[dataclasses.Field[Any], ...] = dataclasses.fields(importLogicalPath2Identifier(dataclassLogicalPathModule, dataclassIdentifierParallel))  # pyright: ignore [reportArgumentType]
    onlyParallelFields: list[dataclasses.Field[Any]] = [field for field in dataclassFieldsParallel if field.name not in [fieldBase.name for fieldBase in dataclassBaseFields]]

    Official_fieldOrder: list[str] = []
    dictionaryDeReConstruction: dict[str, DeReConstructField2ast] = {}

    dataclassClassDef: ast.ClassDef | None = extractClassDef(parseLogicalPath2astModule(dataclassLogicalPathModule), dataclassIdentifierParallel)
    if not dataclassClassDef:
        message = f"I could not find `{dataclassIdentifierParallel = }` in `{dataclassLogicalPathModule = }`."
        raise ValueError(message)

    for aField in onlyParallelFields:
        Official_fieldOrder.append(aField.name)
        dictionaryDeReConstruction[aField.name] = DeReConstructField2ast(dataclassLogicalPathModule, dataclassClassDef, dataclassInstanceIdentifier, aField)

    # Merge the base shattered dataclass with the parallel-only fields, preserving field order.
    shatteredDataclassParallel = ShatteredDataclass(
        countingVariableAnnotation=shatteredDataclass.countingVariableAnnotation,
        countingVariableName=shatteredDataclass.countingVariableName,
        field2AnnAssign={**shatteredDataclass.field2AnnAssign, **{dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].astAnnAssignConstructor for field in Official_fieldOrder}},
        Z0Z_field2AnnAssign={**shatteredDataclass.Z0Z_field2AnnAssign, **{dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].Z0Z_hack for field in Official_fieldOrder}},
        list_argAnnotated4ArgumentsSpecification=shatteredDataclass.list_argAnnotated4ArgumentsSpecification + [dictionaryDeReConstruction[field].ast_argAnnotated for field in Official_fieldOrder],
        list_keyword_field__field4init=shatteredDataclass.list_keyword_field__field4init + [dictionaryDeReConstruction[field].ast_keyword_field__field for field in Official_fieldOrder if dictionaryDeReConstruction[field].init],
        listAnnotations=shatteredDataclass.listAnnotations + [dictionaryDeReConstruction[field].astAnnotation for field in Official_fieldOrder],
        listName4Parameters=shatteredDataclass.listName4Parameters + [dictionaryDeReConstruction[field].astName for field in Official_fieldOrder],
        listUnpack=shatteredDataclass.listUnpack + [Make.AnnAssign(dictionaryDeReConstruction[field].astName, dictionaryDeReConstruction[field].astAnnotation, dictionaryDeReConstruction[field].ast_nameDOTname) for field in Official_fieldOrder],
        map_stateDOTfield2Name={**shatteredDataclass.map_stateDOTfield2Name, **{dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder}},
    )
    shatteredDataclassParallel.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclassParallel.listName4Parameters, Make.Store())
    shatteredDataclassParallel.repack = Make.Assign([Make.Name(dataclassInstanceIdentifier)], value=Make.Call(Make.Name(dataclassIdentifierParallel), list_keyword=shatteredDataclassParallel.list_keyword_field__field4init))
    shatteredDataclassParallel.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclassParallel.listAnnotations))

    shatteredDataclassParallel.imports.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
    shatteredDataclassParallel.imports.addImportFrom_asStr(dataclassLogicalPathModule, dataclassIdentifierParallel)
    shatteredDataclassParallel.imports.update(shatteredDataclass.imports)
    # The parallel dataclass supersedes the base dataclass; drop the now-unused base import.
    shatteredDataclassParallel.imports.removeImportFrom(dataclassLogicalPathModule, dataclassIdentifier)

    # END add the parallel state fields to the count function ------------------------------------------------

    ingredientsFunction.imports.update(shatteredDataclassParallel.imports)
    ingredientsFunction: IngredientsFunction = removeDataclassFromFunction(ingredientsFunction, shatteredDataclassParallel)

    # START add the parallel logic to the count function ------------------------------------------------

    # Target the `while leafConnectee ...` loop that enumerates gap candidates.
    findThis = Be.While.testIs(Be.Compare.leftIs(IfThis.isNameIdentifier('leafConnectee')))
    captureCountGapsCodeBlock: NodeTourist[ast.While, Sequence[ast.stmt]] = NodeTourist(findThis, doThat = Then.extractIt(DOT.body))
    countGapsCodeBlock: Sequence[ast.stmt] = raiseIfNone(captureCountGapsCodeBlock.captureLastMatch(ingredientsFunction.astFunctionDef))

    # Guard the loop body so each worker only counts the folds belonging to its taskIndex:
    # `if leaf1ndex != taskDivisions or leafConnectee % taskDivisions == taskIndex:`
    thisIsMyTaskIndexCodeBlock = ast.If(ast.BoolOp(ast.Or()
        , values=[ast.Compare(ast.Name('leaf1ndex'), ops=[ast.NotEq()], comparators=[ast.Name('taskDivisions')])
        , ast.Compare(Make.Mod.join([ast.Name('leafConnectee'), ast.Name('taskDivisions')]), ops=[ast.Eq()], comparators=[ast.Name('taskIndex')])])
        , body=list(countGapsCodeBlock[0:-1]))

    # Keep the loop's final statement (the increment) outside the guard so iteration still advances.
    countGapsCodeBlockNew: list[ast.stmt] = [thisIsMyTaskIndexCodeBlock, countGapsCodeBlock[-1]]
    NodeChanger[ast.While, hasDOTbody](findThis, doThat = Grab.bodyAttribute(Then.replaceWith(countGapsCodeBlockNew))).visit(ingredientsFunction.astFunctionDef)

    # END add the parallel logic to the count function ------------------------------------------------

    ingredientsFunction = removeUnusedParameters(ingredientsFunction)

    ingredientsFunction = decorateCallableWithNumba(ingredientsFunction, parametersNumbaLight)

    # START unpack/repack the dataclass function ------------------------------------------------
    sourceCallableIdentifier = identifierCallableSourceDispatcherDEFAULT

    unRepackDataclass: IngredientsFunction = astModuleToIngredientsFunction(astModule, sourceCallableIdentifier)
    unRepackDataclass.astFunctionDef.name = 'unRepack' + dataclassIdentifierParallel
    unRepackDataclass.imports.update(shatteredDataclassParallel.imports)
    # Retarget the dispatcher's annotations from the base dataclass to the parallel dataclass.
    NodeChanger(
        findThis = Be.arg.annotationIs(Be.Name.idIs(lambda thisAttribute: thisAttribute == dataclassIdentifier))  # pyright: ignore[reportArgumentType]
        , doThat = Grab.annotationAttribute(Grab.idAttribute(Then.replaceWith(dataclassIdentifierParallel)))
    ).visit(unRepackDataclass.astFunctionDef)
    unRepackDataclass.astFunctionDef.returns = Make.Name(dataclassIdentifierParallel)
    targetCallableIdentifier: identifierDotAttribute = ingredientsFunction.astFunctionDef.name
    unRepackDataclass = unpackDataclassCallFunctionRepackDataclass(unRepackDataclass, targetCallableIdentifier, shatteredDataclassParallel)

    # Reuse the counting function's return tuple as both the assignment target and the argument list.
    astTuple: ast.Tuple = raiseIfNone(NodeTourist[ast.Return, ast.Tuple](Be.Return, Then.extractIt(DOT.value)).captureLastMatch(ingredientsFunction.astFunctionDef))  # pyright: ignore[reportArgumentType]
    astTuple.ctx = Make.Store()
    changeAssignCallToTarget: NodeChanger[ast.Assign, ast.Assign] = NodeChanger(
        findThis = Be.Assign.valueIs(IfThis.isCallIdentifier(targetCallableIdentifier))
        , doThat = Then.replaceWith(Make.Assign([astTuple], value=Make.Call(Make.Name(targetCallableIdentifier), astTuple.elts)))
    )
    changeAssignCallToTarget.visit(unRepackDataclass.astFunctionDef)

    # Build `doTheNeedful(state, concurrencyLimit)`: fan tasks out over a ProcessPoolExecutor,
    # collect per-task states, and aggregate groupsOfFolds into foldsTotal.
    ingredientsDoTheNeedful: IngredientsFunction = IngredientsFunction(
        astFunctionDef = Make.FunctionDef('doTheNeedful'
        , argumentSpecification=Make.arguments(list_arg=[Make.arg('state', annotation=Make.Name(dataclassIdentifierParallel)), Make.arg('concurrencyLimit', annotation=Make.Name('int'))])
        , body=[Make.Assign([Make.Name('stateParallel', Make.Store())], value=Make.Call(Make.Name('deepcopy'), listParameters=[Make.Name('state')]))
            , Make.AnnAssign(Make.Name('listStatesParallel', Make.Store()), annotation=Make.Subscript(value=Make.Name('list'), slice=Make.Name(dataclassIdentifierParallel))
                , value=Make.Mult.join([Make.List([Make.Name('stateParallel')]), Make.Attribute(Make.Name('stateParallel'), 'taskDivisions')]))
            , Make.AnnAssign(Make.Name('groupsOfFoldsTotal', Make.Store()), annotation=Make.Name('int'), value=Make.Constant(value=0))

            , Make.AnnAssign(Make.Name('dictionaryConcurrency', Make.Store()), annotation=Make.Subscript(value=Make.Name('dict'), slice=Make.Tuple([Make.Name('int'), Make.Subscript(value=Make.Name('ConcurrentFuture'), slice=Make.Name(dataclassIdentifierParallel))])), value=Make.Dict())
            , Make.With(items=[Make.withitem(context_expr=Make.Call(Make.Name('ProcessPoolExecutor'), listParameters=[Make.Name('concurrencyLimit')]), optional_vars=Make.Name('concurrencyManager', Make.Store()))]
                , body=[Make.For(Make.Name('indexSherpa', Make.Store()), iter=Make.Call(Make.Name('range'), listParameters=[Make.Attribute(Make.Name('stateParallel'), 'taskDivisions')])
                    , body=[Make.Assign([Make.Name('state', Make.Store())], value=Make.Call(Make.Name('deepcopy'), listParameters=[Make.Name('stateParallel')]))
                        , Make.Assign([Make.Attribute(Make.Name('state'), 'taskIndex', context=Make.Store())], value=Make.Name('indexSherpa'))
                        , Make.Assign([Make.Subscript(Make.Name('dictionaryConcurrency'), slice=Make.Name('indexSherpa'), context=Make.Store())], value=Make.Call(Make.Attribute(Make.Name('concurrencyManager'), 'submit'), listParameters=[Make.Name(unRepackDataclass.astFunctionDef.name), Make.Name('state')]))])
                    , Make.For(Make.Name('indexSherpa', Make.Store()), iter=Make.Call(Make.Name('range'), listParameters=[Make.Attribute(Make.Name('stateParallel'), 'taskDivisions')])
                    , body=[Make.Assign([Make.Subscript(Make.Name('listStatesParallel'), slice=Make.Name('indexSherpa'), context=Make.Store())], value=Make.Call(Make.Attribute(Make.Subscript(Make.Name('dictionaryConcurrency'), slice=Make.Name('indexSherpa')), 'result')))
                        , Make.AugAssign(Make.Name('groupsOfFoldsTotal', Make.Store()), op=ast.Add(), value=Make.Attribute(Make.Subscript(Make.Name('listStatesParallel'), slice=Make.Name('indexSherpa')), 'groupsOfFolds'))])])

            , Make.AnnAssign(Make.Name('foldsTotal', Make.Store()), annotation=Make.Name('int'), value=Make.Mult.join([Make.Name('groupsOfFoldsTotal'), Make.Attribute(Make.Name('stateParallel'), 'leavesTotal')]))
            , Make.Return(Make.Tuple([Make.Name('foldsTotal'), Make.Name('listStatesParallel')]))]
        , returns=Make.Subscript(Make.Name('tuple'), slice=Make.Tuple([Make.Name('int'), Make.Subscript(Make.Name('list'), slice=Make.Name(dataclassIdentifierParallel))])))
        , imports = LedgerOfImports(Make.Module([Make.ImportFrom('concurrent.futures', list_alias=[Make.alias('Future', asName='ConcurrentFuture'), Make.alias('ProcessPoolExecutor')]),
            Make.ImportFrom('copy', list_alias=[Make.alias('deepcopy')]),
            Make.ImportFrom('multiprocessing', list_alias=[Make.alias('set_start_method', asName='multiprocessing_set_start_method')])])
        )
    )

    # Generated module prologue selects the 'spawn' start method when run as a script.
    ingredientsModule = IngredientsModule([ingredientsFunction, unRepackDataclass, ingredientsDoTheNeedful]
        , prologue = Make.Module([Make.If(test=Make.Compare(left=Make.Name('__name__'), ops=[Make.Eq()], comparators=[Make.Constant('__main__')]), body=[Make.Expr(Make.Call(Make.Name('multiprocessing_set_start_method'), listParameters=[Make.Constant('spawn')]))])])
    )
    ingredientsModule.removeImportFromModule('numpy')

    pathFilename: PurePath = getPathFilename(packageSettings.pathPackage, logicalPathInfix, moduleIdentifier)

    write_astModule(ingredientsModule, pathFilename, packageSettings.identifierPackage)

    return pathFilename
|
|
195
|
+
|
|
196
|
+
def makeMapFoldingModules() -> None:
    """Regenerate every synthetic module for multidimensional map folding.

    Each generator receives a fresh parse of the source algorithm module. The
    theorem-2 variants chain: each step parses the file written by the previous
    step before transforming it further.
    """
    moduleSource = getModule(logicalPathInfix='algorithms')
    makeDaoOfMapFoldingNumba(moduleSource, 'daoOfMapFoldingNumba', None, logicalPathInfixDEFAULT, identifierCallableSourceDispatcherDEFAULT)

    moduleSource = getModule(logicalPathInfix='algorithms')
    makeDaoOfMapFoldingParallelNumba(moduleSource, 'countParallelNumba', None, logicalPathInfixDEFAULT, identifierCallableSourceDispatcherDEFAULT)

    moduleSource = getModule(logicalPathInfix='algorithms')
    makeInitializeState(moduleSource, 'initializeState', 'transitionOnGroupsOfFolds', logicalPathInfixDEFAULT)

    moduleSource = getModule(logicalPathInfix='algorithms')
    pathFilenameWritten: PurePath = makeTheorem2(moduleSource, 'theorem2', None, logicalPathInfixDEFAULT, None)

    pathFilenameWritten = trimTheorem2(parsePathFilename2astModule(pathFilenameWritten), 'theorem2Trimmed', None, logicalPathInfixDEFAULT, None)
    numbaOnTheorem2(parsePathFilename2astModule(pathFilenameWritten), 'theorem2Numba', None, logicalPathInfixDEFAULT, None)

    astImportFrom: ast.ImportFrom = Make.ImportFrom(getLogicalPath(packageSettings.identifierPackage, logicalPathInfixDEFAULT, 'theorem2Numba'), list_alias=[Make.alias(identifierCallableSourceDEFAULT)])
    makeUnRePackDataclass(astImportFrom)
|
|
218
|
+
|
|
219
|
+
# Script entry point: regenerate all synthetic map-folding modules in place.
if __name__ == '__main__':
    makeMapFoldingModules()
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Map folding AST transformation system: Comprehensive transformation orchestration and module generation.
|
|
3
|
+
|
|
4
|
+
This module provides the orchestration layer of the map folding AST transformation system,
|
|
5
|
+
implementing comprehensive tools that coordinate all transformation stages to generate optimized
|
|
6
|
+
implementations with diverse computational strategies and performance characteristics. Building
|
|
7
|
+
upon the foundational pattern recognition, structural decomposition, core transformation tools,
|
|
8
|
+
Numba integration, and configuration management established in previous layers, this module
|
|
9
|
+
executes complete transformation processes that convert high-level dataclass-based algorithms
|
|
10
|
+
into specialized variants optimized for specific execution contexts.
|
|
11
|
+
|
|
12
|
+
The transformation orchestration addresses the full spectrum of optimization requirements for
|
|
13
|
+
map folding computational research through systematic application of the complete transformation
|
|
14
|
+
toolkit. The comprehensive approach decomposes dataclass parameters into primitive values for
|
|
15
|
+
Numba compatibility while removing object-oriented overhead and preserving computational logic,
|
|
16
|
+
generates concurrent execution variants using ProcessPoolExecutor with task division and result
|
|
17
|
+
aggregation, creates dedicated modules for counting variable setup with transformed loop conditions,
|
|
18
|
+
and provides theorem-specific transformations with configurable optimization levels including
|
|
19
|
+
trimmed variants and Numba-accelerated implementations.
|
|
20
|
+
|
|
21
|
+
The orchestration process operates through systematic AST manipulation that analyzes source
|
|
22
|
+
algorithms to extract dataclass dependencies, transforms data access patterns, applies performance
|
|
23
|
+
optimizations, and generates specialized modules with consistent naming conventions and filesystem
|
|
24
|
+
organization. The comprehensive transformation process coordinates pattern recognition for structural
|
|
25
|
+
analysis, dataclass decomposition for parameter optimization, function transformation for signature
|
|
26
|
+
adaptation, Numba integration for compilation optimization, and configuration management for
|
|
27
|
+
systematic generation control.
|
|
28
|
+
|
|
29
|
+
Generated modules maintain algorithmic correctness while providing significant performance
|
|
30
|
+
improvements through just-in-time compilation, parallel execution, and optimized data structures
|
|
31
|
+
tailored for specific computational requirements essential to large-scale map folding research.
|
|
32
|
+
"""
|
|
33
|
+
|
|
34
|
+
from astToolkit import (
|
|
35
|
+
Be, DOT, identifierDotAttribute, IngredientsFunction, NodeTourist, parseLogicalPath2astModule, Then)
|
|
36
|
+
from autoflake import fix_code as autoflake_fix_code
|
|
37
|
+
from hunterMakesPy import raiseIfNone, writeStringToHere
|
|
38
|
+
from mapFolding import packageSettings
|
|
39
|
+
from mapFolding.someAssemblyRequired import identifierModuleSourceAlgorithmDEFAULT, logicalPathInfixDEFAULT
|
|
40
|
+
from os import PathLike
|
|
41
|
+
from pathlib import PurePath
|
|
42
|
+
from typing import Any
|
|
43
|
+
import ast
|
|
44
|
+
import io
|
|
45
|
+
|
|
46
|
+
def findDataclass(ingredientsFunction: IngredientsFunction) -> tuple[str, str, str]:
    """Identify the dataclass a function consumes: defining module, class name, and parameter name.

    The dataclass type is read from the annotation of the function's annotated
    parameter; the module that defines it is then found by searching the
    function's `from ... import ...` ledger for that class name.

    Parameters
    ----------
    ingredientsFunction : IngredientsFunction
        Function container holding the AST and its ledger of imports.

    Returns
    -------
    dataclassLogicalPathModule : str
        Logical path of the module that defines the dataclass.
    dataclassIdentifier : str
        Class name of the dataclass.
    dataclassInstanceIdentifier : str
        Name of the parameter that receives the dataclass instance.

    Raises
    ------
    ValueError
        If dataclass information cannot be extracted from the function.

    """
    annotationOfParameter: ast.expr = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.annotation)).captureLastMatch(ingredientsFunction.astFunctionDef))
    identifierOfDataclass: str = raiseIfNone(NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(annotationOfParameter))
    logicalPathOfModule = None
    for logicalPathCandidate, listTuplesNameAlias in ingredientsFunction.imports._dictionaryImportFrom.items():  # noqa: SLF001
        if any(tupleNameAlias[0] == identifierOfDataclass for tupleNameAlias in listTuplesNameAlias):
            logicalPathOfModule = logicalPathCandidate
            if logicalPathOfModule:
                break
    identifierOfInstance: identifierDotAttribute = raiseIfNone(NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(ingredientsFunction.astFunctionDef))
    return raiseIfNone(logicalPathOfModule), identifierOfDataclass, identifierOfInstance
|
|
88
|
+
|
|
89
|
+
def getLogicalPath(identifierPackage: str | None = None, logicalPathInfix: str | None = None, *moduleIdentifier: str | None) -> identifierDotAttribute:
    """Assemble a dotted logical path from package, infix, and module components, skipping absent pieces."""
    componentsLogicalPath: list[str] = [component for component in (identifierPackage, logicalPathInfix) if component]
    componentsLogicalPath.extend(identifier for identifier in moduleIdentifier if identifier is not None)
    return '.'.join(componentsLogicalPath)
|
|
99
|
+
|
|
100
|
+
def getModule(identifierPackage: str | None = packageSettings.identifierPackage, logicalPathInfix: str | None = logicalPathInfixDEFAULT, moduleIdentifier: str | None = identifierModuleSourceAlgorithmDEFAULT) -> ast.Module:
    """Parse the module at the logical path assembled from the given components and return its `ast.Module`."""
    return parseLogicalPath2astModule(getLogicalPath(identifierPackage, logicalPathInfix, moduleIdentifier))
|
|
105
|
+
|
|
106
|
+
def getPathFilename(pathRoot: PathLike[str] | PurePath | None = packageSettings.pathPackage, logicalPathInfix: PathLike[str] | PurePath | str | None = None, moduleIdentifier: str = '', fileExtension: str = packageSettings.fileExtension) -> PurePath:
	"""Assemble the filesystem path for a module file.

	Parameters
	----------
	pathRoot : PathLike[str] | PurePath | None = packageSettings.pathPackage
		Base directory for the package structure.
	logicalPathInfix : PathLike[str] | PurePath | str | None = None
		Subdirectory for organizing generated modules; dots separate nested directories.
	moduleIdentifier : str = ''
		Name of the specific module file.
	fileExtension : str = packageSettings.fileExtension
		File extension for Python modules.

	Returns
	-------
	pathFilename : PurePath
		Complete filesystem path for the generated module file.

	"""
	# Collect the leading components once, then build the path in a single call.
	componentsPath: list[PathLike[str] | PurePath | str] = []
	if pathRoot:
		componentsPath.append(pathRoot)
	if logicalPathInfix:
		componentsPath.extend(str(logicalPathInfix).split('.'))
	return PurePath(*componentsPath, moduleIdentifier + fileExtension)
|
|
132
|
+
|
|
133
|
+
def write_astModule(astModule: ast.Module, pathFilename: PathLike[Any] | PurePath | io.TextIOBase, packageName: str | None = None) -> None:
	"""Unparse an AST module, clean its imports with autoflake, and write it out.

	Parameters
	----------
	astModule : ast.Module
		The AST module to be written to a file.
	pathFilename : PathLike[Any] | PurePath
		The file path where the module should be written.
	packageName : str | None = None
		Optional package name to preserve in import optimization.
	"""
	ast.fix_missing_locations(astModule)
	sourceCode: str = ast.unparse(astModule)
	# autoflake keeps imports of `packageName` even if they look unused.
	preservedImports: list[str] = [packageName] if packageName else []
	sourceCode = autoflake_fix_code(sourceCode, preservedImports, expand_star_imports=False, remove_all_unused_imports=True, remove_duplicate_keys = False, remove_unused_variables = False)
	writeStringToHere(sourceCode, pathFilename)
|
|
152
|
+
|
|
@@ -136,7 +136,7 @@ While Numba offers multiple decorators (`@jit`, `@njit`, `@vectorize`), this too
|
|
|
136
136
|
on the general-purpose `@jit` decorator with configurable parameters for flexibility.
|
|
137
137
|
"""
|
|
138
138
|
|
|
139
|
-
def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, parametersNumba: ParametersNumba | None = None) -> IngredientsFunction:
|
|
139
|
+
def decorateCallableWithNumba(ingredientsFunction: IngredientsFunction, parametersNumba: ParametersNumba | None = None) -> IngredientsFunction:
|
|
140
140
|
"""Transform a Python function into a Numba-accelerated version with appropriate decorators.
|
|
141
141
|
|
|
142
142
|
(AI generated docstring)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Everything in this directory is synthesized by other modules in the package."""
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
from mapFolding.dataBaskets import MapFoldingState
|
|
2
|
+
from mapFolding.syntheticModules.A007822.asynchronousAnnex import (
|
|
3
|
+
filterAsymmetricFolds, getAsymmetricFoldsTotal, initializeConcurrencyManager)
|
|
4
|
+
|
|
5
|
+
def activeLeafGreaterThan0(state: MapFoldingState) -> bool:
	"""True while there is still an active leaf to process."""
	return 0 < state.leaf1ndex
|
|
7
|
+
|
|
8
|
+
def activeLeafGreaterThanLeavesTotal(state: MapFoldingState) -> bool:
	"""True when every leaf has been placed, i.e. a complete folding exists."""
	return state.leavesTotal < state.leaf1ndex
|
|
10
|
+
|
|
11
|
+
def activeLeafIsTheFirstLeaf(state: MapFoldingState) -> bool:
	"""True when the active leaf is leaf 1 (or earlier)."""
	return not state.leaf1ndex > 1
|
|
13
|
+
|
|
14
|
+
def activeLeafIsUnconstrainedInAllDimensions(state: MapFoldingState) -> bool:
	"""True when no dimension constrains the active leaf."""
	if state.dimensionsUnconstrained:
		return False
	return True
|
|
16
|
+
|
|
17
|
+
def activeLeafUnconstrainedInThisDimension(state: MapFoldingState) -> MapFoldingState:
	"""Record that the current dimension does not constrain the active leaf."""
	state.dimensionsUnconstrained = state.dimensionsUnconstrained - 1
	return state
|
|
20
|
+
|
|
21
|
+
def filterCommonGaps(state: MapFoldingState) -> MapFoldingState:
	"""Keep a candidate gap only if every unconstrained dimension proposed it."""
	leafCandidate = state.gapsWhere[state.indexMiniGap]
	state.gapsWhere[state.gap1ndex] = leafCandidate
	if state.countDimensionsGapped[leafCandidate] == state.dimensionsUnconstrained:
		state.gap1ndex += 1
	# Reset the tally so the next placement starts from zero.
	state.countDimensionsGapped[leafCandidate] = 0
	return state
|
|
27
|
+
|
|
28
|
+
def gapAvailable(state: MapFoldingState) -> bool:
	"""True while the active leaf index is positive, so a gap can be used."""
	return 0 < state.leaf1ndex
|
|
30
|
+
|
|
31
|
+
def incrementActiveGap(state: MapFoldingState) -> MapFoldingState:
	"""Advance the active gap index by one."""
	state.gap1ndex = state.gap1ndex + 1
	return state
|
|
34
|
+
|
|
35
|
+
def incrementGap1ndexCeiling(state: MapFoldingState) -> MapFoldingState:
	"""Advance the gap ceiling index by one."""
	state.gap1ndexCeiling = state.gap1ndexCeiling + 1
	return state
|
|
38
|
+
|
|
39
|
+
def incrementIndexMiniGap(state: MapFoldingState) -> MapFoldingState:
	"""Advance the mini-gap scan index by one."""
	state.indexMiniGap = state.indexMiniGap + 1
	return state
|
|
42
|
+
|
|
43
|
+
def initializeIndexMiniGap(state: MapFoldingState) -> MapFoldingState:
	"""Start the mini-gap scan at the current active gap index."""
	state.indexMiniGap = 0 + state.gap1ndex
	return state
|
|
46
|
+
|
|
47
|
+
def initializeVariablesToFindGaps(state: MapFoldingState) -> MapFoldingState:
	"""Reset the per-leaf search variables before scanning the dimensions."""
	# The three assignments are independent of each other.
	state.indexDimension = 0
	state.dimensionsUnconstrained = state.dimensionsTotal
	state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
	return state
|
|
52
|
+
|
|
53
|
+
def insertActiveLeaf(state: MapFoldingState) -> MapFoldingState:
	"""Register every index below the active leaf as a candidate gap."""
	state.indexLeaf = 0
	for _iteration in range(int(state.leaf1ndex)):
		state.gapsWhere[state.gap1ndexCeiling] = state.indexLeaf
		state.gap1ndexCeiling += 1
		state.indexLeaf += 1
	return state
|
|
60
|
+
|
|
61
|
+
def insertActiveLeafAtGap(state: MapFoldingState) -> MapFoldingState:
	"""Splice the active leaf into the folding at the most recent gap."""
	state.gap1ndex -= 1
	leaf = state.leaf1ndex
	leafNeighbor = state.gapsWhere[state.gap1ndex]
	# Link the active leaf between its neighbor and the leaf below the neighbor.
	state.leafAbove[leaf] = leafNeighbor
	state.leafBelow[leaf] = state.leafBelow[leafNeighbor]
	state.leafBelow[leafNeighbor] = leaf
	state.leafAbove[state.leafBelow[leaf]] = leaf
	state.gapRangeStart[leaf] = state.gap1ndex
	state.leaf1ndex = leaf + 1
	return state
|
|
70
|
+
|
|
71
|
+
def leafBelowSentinelIs1(state: MapFoldingState) -> bool:
	"""True when leaf 1 sits directly below the sentinel position 0."""
	leafBelowSentinel = state.leafBelow[0]
	return leafBelowSentinel == 1
|
|
73
|
+
|
|
74
|
+
def leafConnecteeIsActiveLeaf(state: MapFoldingState) -> bool:
	"""True when the connection lookup returned the active leaf itself."""
	return state.leaf1ndex == state.leafConnectee
|
|
76
|
+
|
|
77
|
+
def lookForGaps(state: MapFoldingState) -> MapFoldingState:
	"""Record the connected leaf as a candidate gap and tally this dimension."""
	state.gapsWhere[state.gap1ndexCeiling] = state.leafConnectee
	# First dimension to propose this leaf grows the candidate region.
	if not state.countDimensionsGapped[state.leafConnectee]:
		state.gap1ndexCeiling += 1
	state.countDimensionsGapped[state.leafConnectee] += 1
	return state
|
|
83
|
+
|
|
84
|
+
def lookupLeafConnecteeInConnectionGraph(state: MapFoldingState) -> MapFoldingState:
	"""Start the connection walk at the active leaf's own graph entry."""
	indexSelf = state.leaf1ndex
	state.leafConnectee = state.connectionGraph[state.indexDimension, indexSelf, indexSelf]
	return state
|
|
87
|
+
|
|
88
|
+
def loopingLeavesConnectedToActiveLeaf(state: MapFoldingState) -> bool:
	"""True while the connection walk has not returned to the active leaf."""
	return state.leaf1ndex != state.leafConnectee
|
|
90
|
+
|
|
91
|
+
def loopingThroughTheDimensions(state: MapFoldingState) -> bool:
	"""True while there are dimensions left to scan."""
	return not state.indexDimension >= state.dimensionsTotal
|
|
93
|
+
|
|
94
|
+
def loopingToActiveGapCeiling(state: MapFoldingState) -> bool:
	"""True while the mini-gap scan has not reached the gap ceiling."""
	return state.gap1ndexCeiling > state.indexMiniGap
|
|
96
|
+
|
|
97
|
+
def noGapsHere(state: MapFoldingState) -> bool:
	"""True when the active leaf has no remaining gaps, so we must backtrack."""
	if state.leaf1ndex <= 0:
		return False
	return state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]
|
|
99
|
+
|
|
100
|
+
def tryAnotherLeafConnectee(state: MapFoldingState) -> MapFoldingState:
	"""Step the connection walk to the leaf below the current connectee."""
	leafNext = state.leafBelow[state.leafConnectee]
	state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, leafNext]
	return state
|
|
103
|
+
|
|
104
|
+
def tryNextDimension(state: MapFoldingState) -> MapFoldingState:
	"""Advance the dimension scan index by one."""
	state.indexDimension = state.indexDimension + 1
	return state
|
|
107
|
+
|
|
108
|
+
def undoLastLeafPlacement(state: MapFoldingState) -> MapFoldingState:
	"""Unlink the most recently placed leaf from the folding (backtrack)."""
	state.leaf1ndex -= 1
	leaf = state.leaf1ndex
	# Reconnect the neighbors to each other, removing `leaf` from the chain.
	state.leafBelow[state.leafAbove[leaf]] = state.leafBelow[leaf]
	state.leafAbove[state.leafBelow[leaf]] = state.leafAbove[leaf]
	return state
|
|
113
|
+
|
|
114
|
+
def count(state: MapFoldingState) -> MapFoldingState:
	"""Enumerate foldings by backtracking, submitting each complete folding for
	asymmetry filtering, then collect the asynchronous total.

	Statement order and nesting are load-bearing here: the gap-search phase,
	the backtracking loop, and the placement step must run in exactly this
	sequence each iteration. `getAsymmetricFoldsTotal` shuts down the worker
	pool, so it must run exactly once, when no gap is available and the loop
	is about to terminate.
	"""
	while activeLeafGreaterThan0(state):
		# Only explore placements reachable from a valid partial folding.
		if activeLeafIsTheFirstLeaf(state) or leafBelowSentinelIs1(state):
			if activeLeafGreaterThanLeavesTotal(state):
				# Complete folding: hand a snapshot to the worker pool.
				filterAsymmetricFolds(state.leafBelow)
			else:
				# Scan every dimension for gaps where the active leaf fits.
				state = initializeVariablesToFindGaps(state)
				while loopingThroughTheDimensions(state):
					state = lookupLeafConnecteeInConnectionGraph(state)
					if leafConnecteeIsActiveLeaf(state):
						state = activeLeafUnconstrainedInThisDimension(state)
					else:
						while loopingLeavesConnectedToActiveLeaf(state):
							state = lookForGaps(state)
							state = tryAnotherLeafConnectee(state)
					state = tryNextDimension(state)
				if activeLeafIsUnconstrainedInAllDimensions(state):
					state = insertActiveLeaf(state)
				# Compact the candidates to gaps common to all dimensions.
				state = initializeIndexMiniGap(state)
				while loopingToActiveGapCeiling(state):
					state = filterCommonGaps(state)
					state = incrementIndexMiniGap(state)
		# Backtrack while the current leaf has no gaps left to try.
		while noGapsHere(state):
			state = undoLastLeafPlacement(state)
		if gapAvailable(state):
			state = insertActiveLeafAtGap(state)
		else:
			# No gap and leaf1ndex == 0: drain the pool and read the total.
			state.groupsOfFolds = getAsymmetricFoldsTotal()
	# Fold pairs related by symmetry are counted once; hence the halving.
	state.groupsOfFolds = (state.groupsOfFolds + 1) // 2
	return state
|
|
144
|
+
|
|
145
|
+
def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
	"""Start the asynchronous fold-filtering machinery, then run the count."""
	initializeConcurrencyManager()
	return count(state)
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
|
|
2
|
+
from hunterMakesPy import raiseIfNone
|
|
3
|
+
from mapFolding import Array1DLeavesTotal
|
|
4
|
+
from queue import Empty, Queue
|
|
5
|
+
from threading import Thread
|
|
6
|
+
import numpy
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# Module-level shared state for the asynchronous fold-filtering pipeline.
concurrencyManager = None  # ProcessPoolExecutor; created by initializeConcurrencyManager.
groupsOfFoldsTotal: int = 0  # Running total accumulated by the processing thread.
processingThread = None  # Thread that drains queueFutures into groupsOfFoldsTotal.
queueFutures: Queue[ConcurrentFuture[int]] = Queue()  # Futures awaiting accumulation; a None sentinel ends processing.
|
|
13
|
+
|
|
14
|
+
def initializeConcurrencyManager(maxWorkers: int | None=None, groupsOfFolds: int=0) -> None:
	"""Create a fresh process pool, futures queue, running total, and the
	accumulator thread.

	The accumulator thread must start only after `queueFutures` has been
	replaced, so the Thread creation and start come last.
	"""
	global concurrencyManager, queueFutures, groupsOfFoldsTotal, processingThread
	groupsOfFoldsTotal = groupsOfFolds
	queueFutures = Queue()
	concurrencyManager = ProcessPoolExecutor(max_workers=maxWorkers)
	processingThread = Thread(target=_processCompletedFutures)
	processingThread.start()
|
|
21
|
+
|
|
22
|
+
def _processCompletedFutures() -> None:
	"""Accumulator loop run on `processingThread`.

	Pulls futures off `queueFutures` and adds each result into the module-level
	`groupsOfFoldsTotal`. A `None` sentinel on the queue ends the loop. The
	1-second timeout with `Empty`/`continue` keeps the thread responsive even
	when no futures are pending.
	"""
	global queueFutures, groupsOfFoldsTotal
	while True:
		try:
			# NOTE(review): the queue is typed Queue[ConcurrentFuture[int]] but
			# also carries the None sentinel put by getAsymmetricFoldsTotal.
			claimTicket: ConcurrentFuture[int] = queueFutures.get(timeout=1)
			if claimTicket is None:
				break
			# .result() blocks until the worker finishes; exceptions from the
			# worker would propagate here.
			groupsOfFoldsTotal += claimTicket.result()
		except Empty:
			continue
|
|
32
|
+
|
|
33
|
+
def _filterAsymmetricFolds(leafBelow: Array1DLeavesTotal) -> int:
	"""Worker-process routine: score one completed folding for symmetry.

	Runs inside the ProcessPoolExecutor on a snapshot of `leafBelow`.
	Despite the name, the returned count is incremented when
	`ImaSymmetricFold` stays True — presumably the caller combines this with
	the overall fold count to isolate asymmetric folds; verify against the
	A007822 derivation. TODO confirm.
	"""
	groupsOfFolds = 0
	leafComparison: Array1DLeavesTotal = numpy.zeros_like(leafBelow)
	# leafBelow has leavesTotal + 1 entries; index 0 is the sentinel.
	leavesTotal = leafBelow.size - 1
	indexLeaf = 0
	leafConnectee = 0
	# Walk the leafBelow successor chain, recording each step's displacement
	# modulo leavesTotal.
	while leafConnectee < leavesTotal + 1:
		leafNumber = int(leafBelow[indexLeaf])
		leafComparison[leafConnectee] = (leafNumber - indexLeaf + leavesTotal) % leavesTotal
		indexLeaf = leafNumber
		leafConnectee += 1
	indexInMiddle = leavesTotal // 2
	indexDistance = 0
	# Test every rotation of the displacement sequence for mirror symmetry.
	while indexDistance < leavesTotal + 1:
		ImaSymmetricFold = True
		leafConnectee = 0
		while leafConnectee < indexInMiddle:
			# Compare position leafConnectee against its mirror position
			# (leavesTotal - 1 - leafConnectee) within this rotation.
			if leafComparison[(indexDistance + leafConnectee) % (leavesTotal + 1)] != leafComparison[(indexDistance + leavesTotal - 1 - leafConnectee) % (leavesTotal + 1)]:
				ImaSymmetricFold = False
				break
			leafConnectee += 1
		if ImaSymmetricFold:
			groupsOfFolds += 1
		indexDistance += 1
	return groupsOfFolds
|
|
58
|
+
|
|
59
|
+
def filterAsymmetricFolds(leafBelow: Array1DLeavesTotal) -> None:
	"""Queue an asynchronous symmetry analysis of one completed folding."""
	# Copy the array: the caller keeps mutating its leafBelow after this call.
	snapshot = leafBelow.copy()
	claimTicket = raiseIfNone(concurrencyManager).submit(_filterAsymmetricFolds, snapshot)
	queueFutures.put(claimTicket)
|
|
62
|
+
|
|
63
|
+
def getAsymmetricFoldsTotal() -> int:
	"""Drain the worker pool, stop the accumulator thread, and return the total."""
	# Wait for every submitted analysis to finish before signalling shutdown.
	raiseIfNone(concurrencyManager).shutdown(wait=True)
	# None is the sentinel that ends _processCompletedFutures.
	queueFutures.put(None)
	raiseIfNone(processingThread).join()
	return groupsOfFoldsTotal
|