mapFolding 0.9.1-py3-none-any.whl → 0.9.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapFolding/__init__.py +49 -48
- mapFolding/basecamp.py +40 -35
- mapFolding/beDRY.py +74 -67
- mapFolding/oeis.py +56 -73
- mapFolding/reference/__init__.py +2 -2
- mapFolding/someAssemblyRequired/__init__.py +5 -3
- mapFolding/someAssemblyRequired/_theTypes.py +9 -1
- mapFolding/someAssemblyRequired/_tool_Make.py +0 -1
- mapFolding/someAssemblyRequired/_tool_Then.py +16 -8
- mapFolding/someAssemblyRequired/_toolboxAntecedents.py +111 -35
- mapFolding/someAssemblyRequired/_toolboxContainers.py +0 -1
- mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +40 -40
- mapFolding/someAssemblyRequired/toolboxNumba.py +2 -10
- mapFolding/someAssemblyRequired/transformationTools.py +93 -69
- mapFolding/syntheticModules/numbaCount.py +9 -11
- mapFolding/theDao.py +19 -21
- mapFolding/theSSOT.py +29 -43
- mapFolding/toolboxFilesystem.py +29 -38
- {mapfolding-0.9.1.dist-info → mapfolding-0.9.2.dist-info}/METADATA +2 -1
- mapfolding-0.9.2.dist-info/RECORD +47 -0
- tests/test_other.py +0 -7
- mapfolding-0.9.1.dist-info/RECORD +0 -47
- /mapFolding/reference/{lunnanNumpy.py → lunnonNumpy.py} +0 -0
- /mapFolding/reference/{lunnanWhile.py → lunnonWhile.py} +0 -0
- {mapfolding-0.9.1.dist-info → mapfolding-0.9.2.dist-info}/WHEEL +0 -0
- {mapfolding-0.9.1.dist-info → mapfolding-0.9.2.dist-info}/entry_points.txt +0 -0
- {mapfolding-0.9.1.dist-info → mapfolding-0.9.2.dist-info}/licenses/LICENSE +0 -0
- {mapfolding-0.9.1.dist-info → mapfolding-0.9.2.dist-info}/top_level.txt +0 -0
mapFolding/someAssemblyRequired/synthesizeNumbaJob.py
CHANGED
@@ -18,7 +18,7 @@ This creates extremely fast, specialized implementations that can be run directl
 as Python scripts or further compiled into standalone executables.
 """

-from mapFolding
+from mapFolding import getPathFilenameFoldsTotal, raiseIfNoneGitHubIssueNumber3, The
 from mapFolding.someAssemblyRequired import (
     ast_Identifier,
     be,
@@ -29,15 +29,15 @@ from mapFolding.someAssemblyRequired import (
     Make,
     NodeChanger,
     NodeTourist,
+    str_nameDOTname,
     Then,
 )
-from mapFolding.someAssemblyRequired.toolboxNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
-from mapFolding.someAssemblyRequired.transformationTools import extractFunctionDef, write_astModule, makeInitializedComputationState
 from mapFolding.someAssemblyRequired.RecipeJob import RecipeJob
-from mapFolding import
-from
-from Z0Z_tools import autoDecodingRLE
+from mapFolding.someAssemblyRequired.toolboxNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
+from mapFolding.someAssemblyRequired.transformationTools import dictionaryEstimates, extractFunctionDef, write_astModule, makeInitializedComputationState
 from pathlib import PurePosixPath
+from typing import cast, NamedTuple
+from Z0Z_tools import autoDecodingRLE
 import ast
 """Synthesize one file to compute `foldsTotal` of `mapShape`."""

@@ -150,7 +150,7 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
         case 'scalar':
             ImaAnnAssign.value.args[0].value = int(job.state.__dict__[ast_arg.arg]) # type: ignore
         case 'array':
-            dataAsStrRLE: str = autoDecodingRLE(job.state.__dict__[ast_arg.arg],
+            dataAsStrRLE: str = autoDecodingRLE(job.state.__dict__[ast_arg.arg], True)
             dataAs_astExpr: ast.expr = cast(ast.Expr, ast.parse(dataAsStrRLE).body[0]).value
             ImaAnnAssign.value.args = [dataAs_astExpr] # type: ignore
         case _:
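Note on the `autoDecodingRLE(..., True)` change above: the job synthesizer embeds each array field of the computation state in the generated module as a source-code expression. The exact behavior of `Z0Z_tools.autoDecodingRLE` (including its second argument) is not shown in this diff; the sketch below only illustrates the general embed-array-as-expression idea with an invented encoder.

    # Illustration only: an invented encoder standing in for Z0Z_tools.autoDecodingRLE, whose real
    # signature and run-length encoding are not reproduced here.
    import ast
    import numpy

    def arrayAsSourceExpression(array: numpy.ndarray) -> str:
        # Emit a plain literal plus dtype so the generated module can rebuild the array; a real RLE
        # encoder would compress long runs instead of listing every element.
        return f"numpy.array({array.tolist()}, dtype=numpy.{array.dtype})"

    dataAsStr = arrayAsSourceExpression(numpy.zeros(4, dtype=numpy.uint16))
    # The synthesizer parses the string and grafts the resulting expression node into the assignment.
    dataAsExpr: ast.expr = ast.parse(dataAsStr, mode='eval').body
    print(ast.unparse(dataAsExpr))  # numpy.array([0, 0, 0, 0], dtype=numpy.uint16)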
@@ -228,9 +228,9 @@ if __name__ == '__main__':
     writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
     writeStream.write(str(foldsTotal))
     writeStream.close()
-    from mapFolding.oeis import getFoldsTotalKnown
-    print(foldsTotal == getFoldsTotalKnown({job.state.mapShape}))
 """
+# from mapFolding.oeis import getFoldsTotalKnown
+# print(foldsTotal == getFoldsTotalKnown({job.state.mapShape}))
 ingredientsModule.appendLauncher(ast.parse(linesLaunch))
 changeReturnParallelCallable = NodeChanger(be.Return, Then.replaceWith(Make.Return(job.shatteredDataclass.countingVariableName)))
 changeReturnParallelCallable.visit(ingredientsCount.astFunctionDef)
@@ -238,38 +238,37 @@ if __name__ == '__main__':

 ingredientsCount = move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsCount, job)

-
-
-
-
-
+class DatatypeConfig(NamedTuple):
+    Z0Z_module: str_nameDOTname
+    fml: ast_Identifier
+    Z0Z_type_name: ast_Identifier
+    Z0Z_asname: ast_Identifier | None = None
+
+listDatatypeConfigs = [
+    DatatypeConfig(fml='DatatypeLeavesTotal', Z0Z_module='numba', Z0Z_type_name='uint16'),
+    DatatypeConfig(fml='DatatypeElephino', Z0Z_module='numba', Z0Z_type_name='uint16'),
+    DatatypeConfig(fml='DatatypeFoldsTotal', Z0Z_module='numba', Z0Z_type_name='int64'),
+]
+
+for datatypeConfig in listDatatypeConfigs:
+    ingredientsModule.imports.addImportFrom_asStr(datatypeConfig.Z0Z_module, datatypeConfig.Z0Z_type_name)
+    statement = Make.Assign(
+        [Make.Name(datatypeConfig.fml, ast.Store())],
+        Make.Name(datatypeConfig.Z0Z_type_name)
+    )
+    ingredientsModule.appendPrologue(statement=statement)

-
-Z0Z_type = 'uint8'
-ingredientsModule.imports.addImportFrom_asStr('numba', Z0Z_type)
-Z0Z_statement = Make.Assign([Make.Name(Z0Z_Identifier, ast.Store())], Make.Name(Z0Z_type))
-ingredientsModule.appendPrologue(statement=Z0Z_statement)
+ingredientsCount.imports.removeImportFromModule('mapFolding.theSSOT')

-
-
-
-
-
+listNumPyTypeConfigs = [
+    DatatypeConfig(fml='Array1DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array1DLeavesTotal'),
+    DatatypeConfig(fml='Array1DElephino', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array1DElephino'),
+    DatatypeConfig(fml='Array3D', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array3D'),
+]

-
-
-
-ingredientsCount.imports.removeImportFrom(Z0Z_module, None, Z0Z_asname)
-Z0Z_type_name = 'uint8'
-ingredientsCount.imports.addImportFrom_asStr(Z0Z_module, Z0Z_type_name, Z0Z_asname)
-Z0Z_asname = 'Array1DElephino'
-ingredientsCount.imports.removeImportFrom(Z0Z_module, None, Z0Z_asname)
-Z0Z_type_name = 'uint8'
-ingredientsCount.imports.addImportFrom_asStr(Z0Z_module, Z0Z_type_name, Z0Z_asname)
-Z0Z_asname = 'Array3D'
-ingredientsCount.imports.removeImportFrom(Z0Z_module, None, Z0Z_asname)
-Z0Z_type_name = 'uint8'
-ingredientsCount.imports.addImportFrom_asStr(Z0Z_module, Z0Z_type_name, Z0Z_asname)
+for typeConfig in listNumPyTypeConfigs:
+    ingredientsCount.imports.removeImportFrom(typeConfig.Z0Z_module, None, typeConfig.fml)
+    ingredientsCount.imports.addImportFrom_asStr(typeConfig.Z0Z_module, typeConfig.Z0Z_type_name, typeConfig.Z0Z_asname)

 ingredientsCount.astFunctionDef.decorator_list = [] # TODO low-priority, handle this more elegantly
 # TODO when I add the function signature in numba style back to the decorator, the logic needs to handle `ProgressBarType:`
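The hunk above replaces a run of ad-hoc `Z0Z_*` assignments with a `NamedTuple`-driven loop. Below is a minimal sketch of the same table-driven pattern, using source strings and `ast.parse` instead of the package's `Make`/`IngredientsModule` helpers (those names come from the diff and are not reproduced here).

    # Table-driven prologue generation: one import and one alias assignment per row.
    import ast
    from typing import NamedTuple

    class TypeAlias(NamedTuple):
        module: str      # where the concrete type lives, e.g. 'numba'
        aliasName: str   # the name the generated module will bind, e.g. 'DatatypeLeavesTotal'
        typeName: str    # the concrete type, e.g. 'uint16'

    listAliases = [
        TypeAlias(module='numba', aliasName='DatatypeLeavesTotal', typeName='uint16'),
        TypeAlias(module='numba', aliasName='DatatypeFoldsTotal', typeName='int64'),
    ]

    prologue = ast.Module(body=[], type_ignores=[])
    for alias in listAliases:
        # Mirrors the loop in the diff: emit the import, then bind the alias to the concrete type.
        prologue.body.extend(ast.parse(f"from {alias.module} import {alias.typeName}\n{alias.aliasName} = {alias.typeName}").body)

    print(ast.unparse(prologue))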
@@ -300,9 +299,10 @@ if __name__ == '__main__':
 """

 if __name__ == '__main__':
-    mapShape = (
+    mapShape = (2,2,2,2,2,2,2,2)
     state = makeInitializedComputationState(mapShape)
-    foldsTotalEstimated = getFoldsTotalKnown(state.mapShape) // state.leavesTotal
+    # foldsTotalEstimated = getFoldsTotalKnown(state.mapShape) // state.leavesTotal
+    foldsTotalEstimated = dictionaryEstimates[state.mapShape] // state.leavesTotal
     pathModule = PurePosixPath(The.pathPackage, 'jobs')
     pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(state.mapShape, pathModule))
     aJob = RecipeJob(state, foldsTotalEstimated, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
mapFolding/someAssemblyRequired/toolboxNumba.py
CHANGED
@@ -16,8 +16,8 @@ performance improvements while preserving code semantics and correctness.
 """

 from collections.abc import Callable, Sequence
-from mapFolding.someAssemblyRequired import
-from mapFolding.someAssemblyRequired.transformationTools import
+from mapFolding.someAssemblyRequired import ast_Identifier, be, IngredientsFunction, Make, NodeTourist, RecipeSynthesizeFlow, str_nameDOTname, Then
+from mapFolding.someAssemblyRequired.transformationTools import makeNewFlow, write_astModule
 from numba.core.compiler import CompilerBase as numbaCompilerBase
 from typing import Any, cast, Final, TYPE_CHECKING, TypeGuard
 import ast
@@ -154,14 +154,6 @@ class SpicesJobNumba:
     parametersNumba: ParametersNumba = dataclasses.field(default_factory=ParametersNumba) # type: ignore

 # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
-class be:
-    @staticmethod
-    def Call(node: ast.AST) -> TypeGuard[ast.Call]:
-        return isinstance(node, ast.Call)
-    @staticmethod
-    def Return(node: ast.AST) -> TypeGuard[ast.Return]:
-        return isinstance(node, ast.Return)
-
 def makeNumbaFlow(numbaFlow: RecipeSynthesizeFlow) -> None:
     """
     Transform standard Python algorithm code into optimized Numba implementations.
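The removed local `be` class is a set of `TypeGuard` predicates; the new import on line 19 suggests the shared `be` in `mapFolding.someAssemblyRequired` now supplies the same checks (an inference from the import change). The pattern itself is small and runnable on its own:

    # TypeGuard predicates let a visitor filter by node type while keeping precise static types.
    import ast
    from typing import TypeGuard

    class be:
        @staticmethod
        def Call(node: ast.AST) -> TypeGuard[ast.Call]:
            return isinstance(node, ast.Call)

        @staticmethod
        def Return(node: ast.AST) -> TypeGuard[ast.Return]:
            return isinstance(node, ast.Return)

    tree = ast.parse("def f():\n    return g()")
    returnNodes = [node for node in ast.walk(tree) if be.Return(node)]
    print(len(returnNodes))  # 1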
mapFolding/someAssemblyRequired/transformationTools.py
CHANGED
@@ -119,7 +119,7 @@ def makeDictionaryFunctionDef(module: ast.Module) -> dict[ast_Identifier, ast.Fu
         A dictionary mapping function identifiers to their AST function definition nodes.
     """
     dictionaryIdentifier2FunctionDef: dict[ast_Identifier, ast.FunctionDef] = {}
-    NodeTourist(
+    NodeTourist(be.FunctionDef, Then.updateKeyValueIn(DOT.name, Then.extractIt, dictionaryIdentifier2FunctionDef)).visit(module)
     return dictionaryIdentifier2FunctionDef

 def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) -> ast.FunctionDef:
@@ -148,7 +148,7 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->
         raise ValueError(f"FunctionDefToInline not found in dictionaryIdentifier2FunctionDef: {identifierToInline = }") from ERRORmessage

     listIdentifiersCalledFunctions: list[ast_Identifier] = []
-    findIdentifiersToInline = NodeTourist(ifThis.isCallToName,
+    findIdentifiersToInline = NodeTourist(findThis = ifThis.isCallToName, doThat = grab.funcDOTidAttribute(Then.appendTo(listIdentifiersCalledFunctions)))
     findIdentifiersToInline.visit(FunctionDefToInline)

     dictionary4Inlining: dict[ast_Identifier, ast.FunctionDef] = {}
@@ -169,7 +169,8 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->
         if NodeTourist(ifThis.matchesMeButNotAnyDescendant(ifThis.isCall_Identifier(identifier)), Then.extractIt).captureLastMatch(module) is not None:
             FunctionDefTarget = dictionaryFunctionDef[identifier]
             if len(FunctionDefTarget.body) == 1:
-
+                replacement = NodeTourist(be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
+                inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(replacement))
                 for astFunctionDef in dictionary4Inlining.values():
                     inliner.visit(astFunctionDef)
             else:
@@ -179,7 +180,8 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->

     for identifier, FunctionDefTarget in dictionary4Inlining.items():
         if len(FunctionDefTarget.body) == 1:
-
+            replacement = NodeTourist(be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
+            inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(replacement))
             inliner.visit(FunctionDefToInline)
         else:
             inliner = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(identifier)),Then.replaceWith(FunctionDefTarget.body[0:-1]))
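Both hunks above handle the case where the function being inlined has a single-statement body: the lone `return` expression is captured and substituted at every call site. A standalone sketch of that idea with `ast.NodeTransformer` follows (argument-for-parameter substitution is deliberately omitted, and all names here are invented for illustration).

    # Inline a single-return helper by replacing calls to it with its return expression.
    import ast

    source = """
    def double(n):
        return n * 2

    def compute(x):
        return double(x) + 1
    """
    module = ast.parse(source.replace("\n    ", "\n"))
    functionDefs = {statement.name: statement for statement in module.body if isinstance(statement, ast.FunctionDef)}
    target = functionDefs['double']
    returnValue = target.body[0].value  # assumes the body is exactly one `return <expression>`

    class InlineCall(ast.NodeTransformer):
        def visit_Call(self, node: ast.Call) -> ast.AST:
            self.generic_visit(node)
            if isinstance(node.func, ast.Name) and node.func.id == 'double':
                # Real inlining must also substitute arguments for parameters; omitted for brevity.
                return ast.copy_location(returnValue, node)
            return node

    InlineCall().visit(functionDefs['compute'])
    print(ast.unparse(ast.fix_missing_locations(module)))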
@@ -286,7 +288,7 @@ class DeReConstructField2ast:
         self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
         self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)

-        sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), Then.extractIt(DOT.annotation)).captureLastMatch(dataclassClassDef)
+        sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), Then.extractIt(DOT.annotation)).captureLastMatch(dataclassClassDef)
         if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
         else: self.astAnnotation = sherpa

@@ -441,62 +443,25 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]
     if packageName:
         autoflake_additional_imports.append(packageName)
     pythonSource = autoflake_fix_code(pythonSource, autoflake_additional_imports, expand_star_imports=False, remove_all_unused_imports=True, remove_duplicate_keys = False, remove_unused_variables = False)
-    # pythonSource = python_minifier.minify(pythonSource)
+    # pythonSource = python_minifier.minify(pythonSource, remove_annotations=False, hoist_literals=False)
     writeStringToHere(pythonSource, pathFilename)

 # END of acceptable classes and functions ======================================================
-def removeUnusedParameters(ingredientsFunction: IngredientsFunction):
+def removeUnusedParameters(ingredientsFunction: IngredientsFunction) -> IngredientsFunction:
     list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
     list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
     listName: list[ast.Name] = []
-
+    fauxFunctionDef = deepcopy(ingredientsFunction.astFunctionDef)
+    NodeChanger(be.Return, Then.removeIt).visit(fauxFunctionDef)
+    NodeTourist(be.Name, Then.appendTo(listName)).visit(fauxFunctionDef)
     list_Identifiers: list[ast_Identifier] = [astName.id for astName in listName]
     list_IdentifiersNotUsed: list[ast_Identifier] = list(set(list_arg_arg) - set(list_Identifiers))
-
-
-
-
-    (3,15): 824761667826225,
-    (3,3,3,3): 85109616000000000000000000000000,
-    (8,8): 791274195985524900,
-}
-
-# END of marginal classes and functions ======================================================
-def Z0Z_lameFindReplace(astTree: 个, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> 个:
-    """
-    Recursively replace AST nodes based on a mapping of find-replace pairs.
-
-    This function applies brute-force node replacement throughout an AST tree
-    by comparing textual representations of nodes. While not the most efficient
-    approach, it provides a reliable way to replace complex nested structures
-    when more precise targeting methods are difficult to implement.
-
-    The function continues replacing nodes until no more changes are detected
-    in the AST's textual representation, ensuring complete replacement throughout
-    the tree structure.
-
-    Parameters:
-        astTree: The AST structure to modify.
-        mappingFindReplaceNodes: A mapping from source nodes to replacement nodes.
-
-    Returns:
-        The modified AST structure with all matching nodes replaced.
-    """
-    keepGoing = True
-    newTree = deepcopy(astTree)
-
-    while keepGoing:
-        for nodeFind, nodeReplace in mappingFindReplaceNodes.items():
-            NodeChanger(ifThis.Z0Z_unparseIs(nodeFind), Then.replaceWith(nodeReplace)).visit(newTree)
-
-        if ast.unparse(newTree) == ast.unparse(astTree):
-            keepGoing = False
-        else:
-            astTree = deepcopy(newTree)
-    return newTree
+    for arg_Identifier in list_IdentifiersNotUsed:
+        remove_arg = NodeChanger(ifThis.is_arg_Identifier(arg_Identifier), Then.removeIt)
+        remove_arg.visit(ingredientsFunction.astFunctionDef)
+    return ingredientsFunction

 def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
-    # TODO a tool to automatically remove unused variables from the ArgumentsSpecification (return, and returns) _might_ be nice.
     # Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
     listAllIngredientsFunctions = [
         (ingredientsInitialize := astModuleToIngredientsFunction(recipeFlow.source_astModule, recipeFlow.sourceCallableInitialize)),
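The rewritten `removeUnusedParameters` copies the function, drops its `return` statements, collects every remaining `ast.Name`, and then deletes parameters whose identifiers never appear. A standard-library-only sketch of the same pruning (the package's `NodeChanger`/`NodeTourist`/`ifThis` helpers are not reproduced, and only top-level returns are filtered here):

    # Dead-parameter pruning: keep only arguments whose names are still referenced outside `return`.
    import ast
    from copy import deepcopy

    source = "def count(alpha, beta, unused):\n    total = alpha + beta\n    return total"
    functionDef = ast.parse(source).body[0]
    assert isinstance(functionDef, ast.FunctionDef)

    probe = deepcopy(functionDef)
    probe.body = [statement for statement in probe.body if not isinstance(statement, ast.Return)]
    identifiersUsed = {node.id for node in ast.walk(probe) if isinstance(node, ast.Name)}

    functionDef.args.args = [argument for argument in functionDef.args.args if argument.arg in identifiersUsed]
    print(ast.unparse(functionDef))  # def count(alpha, beta): ...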
@@ -537,7 +502,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
     for ingredients in listAllIngredientsFunctions:
         for source_Identifier, recipe_Identifier in listFindReplace:
             updateName = NodeChanger(ifThis.isName_Identifier(source_Identifier) , grab.idAttribute(Then.replaceWith(recipe_Identifier)))
-            update_arg = NodeChanger(ifThis.isArgument_Identifier(source_Identifier), grab.argAttribute(Then.replaceWith(recipe_Identifier)))
+            update_arg = NodeChanger(ifThis.isArgument_Identifier(source_Identifier), grab.argAttribute(Then.replaceWith(recipe_Identifier))) # type: ignore
             updateName.visit(ingredients.astFunctionDef)
             update_arg.visit(ingredients.astFunctionDef)

@@ -555,26 +520,27 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
     # Change callable parameters and Call to the callable at the same time ====
     # sequentialCallable =========================================================
     if recipeFlow.removeDataclassSequential:
-        ingredientsSequential
-
-        changeReturnSequentialCallable = NodeChanger(be.Return, Then.replaceWith(Make.Return(shatteredDataclass.fragments4AssignmentOrParameters)))
-        ingredientsSequential.astFunctionDef.returns = shatteredDataclass.signatureReturnAnnotation
-        replaceAssignSequentialCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(recipeFlow.callableSequential)), Then.replaceWith(Make.Assign(listTargets=[shatteredDataclass.fragments4AssignmentOrParameters], value=astCallSequentialCallable)))
+        ingredientsSequential = removeDataclassFromFunction(ingredientsSequential, shatteredDataclass)
+        ingredientsDispatcher = unpackDataclassCallFunctionRepackDataclass(ingredientsDispatcher, recipeFlow.callableSequential, shatteredDataclass)

-
-
-
-        changeReturnSequentialCallable.visit(ingredientsSequential.astFunctionDef)
-        replaceAssignSequentialCallable.visit(ingredientsDispatcher.astFunctionDef)
-        unpack4sequentialCallable.visit(ingredientsDispatcher.astFunctionDef)
-        repack4sequentialCallable.visit(ingredientsDispatcher.astFunctionDef)
-
-        ingredientsSequential.astFunctionDef = Z0Z_lameFindReplace(ingredientsSequential.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
+    if recipeFlow.removeDataclassInitialize:
+        ingredientsInitialize = removeDataclassFromFunction(ingredientsInitialize, shatteredDataclass)
+        ingredientsDispatcher = unpackDataclassCallFunctionRepackDataclass(ingredientsDispatcher, recipeFlow.callableInitialize, shatteredDataclass)

     # parallelCallable =========================================================
     if recipeFlow.removeDataclassParallel:
         ingredientsParallel.astFunctionDef.args = Make.argumentsSpecification(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
-
+
+        ingredientsParallel.astFunctionDef = Z0Z_lameFindReplace(ingredientsParallel.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
+
+        ingredientsParallel = removeUnusedParameters(ingredientsParallel)
+
+        list_argCuzMyBrainRefusesToThink = ingredientsParallel.astFunctionDef.args.args + ingredientsParallel.astFunctionDef.args.posonlyargs + ingredientsParallel.astFunctionDef.args.kwonlyargs
+        list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
+
+        listParameters = [parameter for parameter in shatteredDataclass.listName4Parameters if parameter.id in list_arg_arg]
+
+        replaceCall2concurrencyManager = NodeChanger(ifThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier), Then.replaceWith(Make.Call(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier), listArguments=[Make.Name(recipeFlow.callableParallel)] + listParameters)))

     # NOTE I am dissatisfied with this logic for many reasons, including that it requires separate NodeCollector and NodeReplacer instances.
     astCallConcurrencyResult: list[ast.Call] = []
@@ -591,16 +557,74 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
         replaceCall2concurrencyManager.visit(ingredientsDispatcher.astFunctionDef)
         changeReturnParallelCallable.visit(ingredientsParallel.astFunctionDef)

-        ingredientsParallel.astFunctionDef = Z0Z_lameFindReplace(ingredientsParallel.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
-
     # Module-level transformations ===========================================================
     ingredientsModuleNumbaUnified = IngredientsModule(ingredientsFunction=listAllIngredientsFunctions, imports=LedgerOfImports(recipeFlow.source_astModule))
     ingredientsModuleNumbaUnified.removeImportFromModule('numpy')

     return ingredientsModuleNumbaUnified

+def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
+    ingredientsTarget.astFunctionDef.args = Make.argumentsSpecification(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
+    ingredientsTarget.astFunctionDef.returns = shatteredDataclass.signatureReturnAnnotation
+    changeReturnCallable = NodeChanger(be.Return, Then.replaceWith(Make.Return(shatteredDataclass.fragments4AssignmentOrParameters)))
+    changeReturnCallable.visit(ingredientsTarget.astFunctionDef)
+    ingredientsTarget.astFunctionDef = Z0Z_lameFindReplace(ingredientsTarget.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
+    return ingredientsTarget
+
+def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: ast_Identifier, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
+    astCallTargetCallable = Make.Call(Make.Name(targetCallableIdentifier), shatteredDataclass.listName4Parameters)
+    replaceAssignTargetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign(listTargets=[shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
+    unpack4targetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
+    repack4targetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
+    replaceAssignTargetCallable.visit(ingredientsCaller.astFunctionDef)
+    unpack4targetCallable.visit(ingredientsCaller.astFunctionDef)
+    repack4targetCallable.visit(ingredientsCaller.astFunctionDef)
+    return ingredientsCaller
+
 def getIt(astCallConcurrencyResult: list[ast.Call]) -> Callable[[ast.AST], ast.AST]:
     def workhorse(node: ast.AST) -> ast.AST:
         NodeTourist(be.Call, Then.appendTo(astCallConcurrencyResult)).visit(node)
         return node
     return workhorse
+
+dictionaryEstimates: dict[tuple[int, ...], int] = {
+    (2,2,2,2,2,2,2,2): 798148657152000,
+    (2,21): 776374224866624,
+    (3,15): 824761667826225,
+    (3,3,3,3): 85109616000000000000000000000000,
+    (8,8): 791274195985524900,
+}
+
+# END of marginal classes and functions ======================================================
+def Z0Z_lameFindReplace(astTree: 个, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> 个:
+    """
+    Recursively replace AST nodes based on a mapping of find-replace pairs.
+
+    This function applies brute-force node replacement throughout an AST tree
+    by comparing textual representations of nodes. While not the most efficient
+    approach, it provides a reliable way to replace complex nested structures
+    when more precise targeting methods are difficult to implement.
+
+    The function continues replacing nodes until no more changes are detected
+    in the AST's textual representation, ensuring complete replacement throughout
+    the tree structure.
+
+    Parameters:
+        astTree: The AST structure to modify.
+        mappingFindReplaceNodes: A mapping from source nodes to replacement nodes.
+
+    Returns:
+        The modified AST structure with all matching nodes replaced.
+    """
+    keepGoing = True
+    newTree = deepcopy(astTree)
+
+    while keepGoing:
+        for nodeFind, nodeReplace in mappingFindReplaceNodes.items():
+            NodeChanger(ifThis.Z0Z_unparseIs(nodeFind), Then.replaceWith(nodeReplace)).visit(newTree)
+
+        if ast.unparse(newTree) == ast.unparse(astTree):
+            keepGoing = False
+        else:
+            astTree = deepcopy(newTree)
+    return newTree
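The relocated `Z0Z_lameFindReplace` keeps replacing nodes until the tree's unparsed text stops changing. Below is a compact, runnable version of that fixed-point loop using only the standard library (the package's `ifThis`/`Then`/`NodeChanger` helpers are not used, and the mapping is keyed by unparsed text for simplicity).

    # Brute-force find/replace over an AST, repeated until a full pass changes nothing.
    import ast
    from copy import deepcopy

    def lameFindReplace(tree: ast.AST, mapping: dict[str, ast.expr]) -> ast.AST:
        class Replace(ast.NodeTransformer):
            def generic_visit(self, node: ast.AST) -> ast.AST:
                # Compare nodes by their unparsed text, the "textual representation" strategy above.
                if isinstance(node, ast.expr) and ast.unparse(node) in mapping:
                    return deepcopy(mapping[ast.unparse(node)])
                return super().generic_visit(node)

        previous = None
        while previous != ast.unparse(tree):      # stop once a pass produces no textual change
            previous = ast.unparse(tree)
            tree = ast.fix_missing_locations(Replace().visit(tree))
        return tree

    tree = ast.parse("total = state.groupsOfFolds * state.leavesTotal")
    mapping = {'state.groupsOfFolds': ast.Name('groupsOfFolds', ast.Load()), 'state.leavesTotal': ast.Name('leavesTotal', ast.Load())}
    print(ast.unparse(lameFindReplace(tree, mapping)))  # total = groupsOfFolds * leavesTotal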
mapFolding/syntheticModules/numbaCount.py
CHANGED
@@ -4,7 +4,7 @@ from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeaves
 from numba import jit

 def countInitialize(state: ComputationState) -> ComputationState:
-    while state.
+    while state.gap1ndex == 0:
         if state.leaf1ndex <= 1 or state.leafBelow[0] == 1:
             state.dimensionsUnconstrained = state.dimensionsTotal
             state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
@@ -42,12 +42,10 @@ def countInitialize(state: ComputationState) -> ComputationState:
             state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
             state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
             state.leaf1ndex += 1
-            if state.gap1ndex > 0:
-                break
     return state

 @jit(_nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
-def countParallel(
+def countParallel(leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> DatatypeFoldsTotal:
     while leaf1ndex > 0:
         if leaf1ndex <= 1 or leafBelow[0] == 1:
             if leaf1ndex > leavesTotal:
@@ -92,7 +90,7 @@ def countParallel(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: Dataty
     return groupsOfFolds

 @jit(_nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
-def countSequential(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, concurrencyLimit: DatatypeElephino, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, foldsTotal: DatatypeFoldsTotal, gap1ndex:
+def countSequential(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, concurrencyLimit: DatatypeElephino, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, foldsTotal: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> tuple[tuple[DatatypeLeavesTotal, ...], DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, Array3D, DatatypeLeavesTotal, Array1DLeavesTotal, DatatypeLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array1DFoldsTotal, DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal]:
     while leaf1ndex > 0:
         if leaf1ndex <= 1 or leafBelow[0] == 1:
             if leaf1ndex > leavesTotal:
@@ -124,7 +122,7 @@ def countSequential(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: Data
             leaf1ndex -= 1
             leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
             leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
-            if
+            if leaf1ndex == 3 and groupsOfFolds:
                 groupsOfFolds *= 2
                 break
         if leaf1ndex > 0:
@@ -161,16 +159,16 @@ def doTheNeedful(state: ComputationState) -> ComputationState:
         leafBelow: Array1DLeavesTotal = state.leafBelow
         foldGroups: Array1DFoldsTotal = state.foldGroups
         foldsTotal: DatatypeFoldsTotal = state.foldsTotal
-        gap1ndex:
+        gap1ndex: DatatypeElephino = state.gap1ndex
         gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
         groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
         indexDimension: DatatypeLeavesTotal = state.indexDimension
         indexLeaf: DatatypeLeavesTotal = state.indexLeaf
         indexMiniGap: DatatypeElephino = state.indexMiniGap
         leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
-        leafConnectee:
+        leafConnectee: DatatypeLeavesTotal = state.leafConnectee
         taskIndex: DatatypeLeavesTotal = state.taskIndex
-        dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel,
+        dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
     for indexSherpa in range(stateParallel.taskDivisions):
         stateParallel.foldGroups[indexSherpa] = dictionaryConcurrency[indexSherpa].result()
     state = stateParallel
@@ -189,14 +187,14 @@ def doTheNeedful(state: ComputationState) -> ComputationState:
     leafBelow: Array1DLeavesTotal = state.leafBelow
     foldGroups: Array1DFoldsTotal = state.foldGroups
     foldsTotal: DatatypeFoldsTotal = state.foldsTotal
-    gap1ndex:
+    gap1ndex: DatatypeElephino = state.gap1ndex
     gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
     groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
     indexDimension: DatatypeLeavesTotal = state.indexDimension
     indexLeaf: DatatypeLeavesTotal = state.indexLeaf
     indexMiniGap: DatatypeElephino = state.indexMiniGap
     leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
-    leafConnectee:
+    leafConnectee: DatatypeLeavesTotal = state.leafConnectee
     taskIndex: DatatypeLeavesTotal = state.taskIndex
     mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex = countSequential(mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
     state = ComputationState(mapShape=mapShape, leavesTotal=leavesTotal, taskDivisions=taskDivisions, concurrencyLimit=concurrencyLimit, countDimensionsGapped=countDimensionsGapped, dimensionsUnconstrained=dimensionsUnconstrained, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, foldGroups=foldGroups, foldsTotal=foldsTotal, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, groupsOfFolds=groupsOfFolds, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, taskIndex=taskIndex)
mapFolding/theDao.py
CHANGED
@@ -1,23 +1,22 @@
 """
 Core computational algorithm for map folding counting and enumeration.

-This module implements the core algorithms for enumerating and counting the various ways
-
-
-
-
-
-
-
-
-
-- A "
--
--
-
-
-
-based on task divisions and concurrency parameters.
+This module implements the core algorithms for enumerating and counting the various ways a rectangular map can be
+folded. It uses a functional state-transformation approach, where each function performs a specific state mutation and
+returns the updated state. The module provides three main counting algorithms:
+
+1. `countInitialize`: Sets up the initial state for computation.
+2. `countSequential`: Processes the folding computation sequentially.
+3. `countParallel`: Distributes the computation across multiple processes.
+
+All algorithms operate on a `ComputationState` object that tracks the folding process, including:
+- A "leaf" is a unit square in the map.
+- A "gap" is a potential position where a new leaf can be folded.
+- Connections track how leaves can connect above/below each other.
+- Leaves are enumerated starting from 1, not 0; hence, `leaf1ndex` not `leafIndex`.
+
+The `doTheNeedful` function is the main entry point that orchestrates the computation strategy based on task divisions and
+concurrency parameters.
 """
 from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
 from copy import deepcopy
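The new docstring describes a state-transformation style in which each step takes the `ComputationState`, performs one mutation, and returns it. A toy illustration of that style (the dataclass and step functions here are invented, not the ones in `theDao.py`):

    # Each step mutates one aspect of the state and returns it, so the counting loops read as chains of named steps.
    from dataclasses import dataclass

    @dataclass
    class ToyState:
        leaf1ndex: int = 1
        gap1ndex: int = 0

    def placeLeaf(state: ToyState) -> ToyState:
        state.leaf1ndex += 1
        return state

    def recordGap(state: ToyState) -> ToyState:
        state.gap1ndex += 1
        return state

    state = ToyState()
    state = placeLeaf(state)
    state = recordGap(state)
    print(state)  # ToyState(leaf1ndex=2, gap1ndex=1)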
@@ -144,7 +143,7 @@ def updateLeafConnectee(state: ComputationState) -> ComputationState:
     return state

 def countInitialize(state: ComputationState) -> ComputationState:
-    while
+    while state.gap1ndex == 0:
         if activeLeafIsTheFirstLeaf(state) or leafBelowSentinelIs1(state):
             state = initializeVariablesToFindGaps(state)
             while loopUpToDimensionsTotal(state):
@@ -164,8 +163,6 @@ def countInitialize(state: ComputationState) -> ComputationState:
             state = incrementIndexMiniGap(state)
         if thereIsAnActiveLeaf(state):
             state = insertLeafAtGap(state)
-        if state.gap1ndex > 0:
-            break
     return state

 def countParallel(state: ComputationState) -> ComputationState:
@@ -218,8 +215,9 @@ def countSequential(state: ComputationState) -> ComputationState:
             state = incrementIndexMiniGap(state)
         while noGapsHere(state):
             state = undoLastLeafPlacement(state)
-            if state.
+            if state.leaf1ndex == 3 and state.groupsOfFolds:
                 state.groupsOfFolds *= 2
+                # print('break')
                 break
         if thereIsAnActiveLeaf(state):
             state = insertLeafAtGap(state)