mapFolding 0.9.1__py3-none-any.whl → 0.9.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. mapFolding/Z0Z_flowControl.py +99 -0
  2. mapFolding/__init__.py +63 -60
  3. mapFolding/basecamp.py +40 -35
  4. mapFolding/beDRY.py +72 -66
  5. mapFolding/dataBaskets.py +49 -0
  6. mapFolding/datatypes.py +21 -0
  7. mapFolding/oeis.py +57 -75
  8. mapFolding/reference/__init__.py +2 -2
  9. mapFolding/someAssemblyRequired/__init__.py +6 -4
  10. mapFolding/someAssemblyRequired/_theTypes.py +9 -1
  11. mapFolding/someAssemblyRequired/_tool_Make.py +0 -1
  12. mapFolding/someAssemblyRequired/_tool_Then.py +16 -8
  13. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +111 -35
  14. mapFolding/someAssemblyRequired/_toolboxContainers.py +58 -49
  15. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +42 -42
  16. mapFolding/someAssemblyRequired/toolboxNumba.py +3 -11
  17. mapFolding/someAssemblyRequired/transformationTools.py +94 -70
  18. mapFolding/syntheticModules/numbaCount.py +9 -11
  19. mapFolding/theDao.py +19 -21
  20. mapFolding/theDaoOfMapFolding.py +142 -0
  21. mapFolding/theSSOT.py +36 -58
  22. mapFolding/toolboxFilesystem.py +29 -38
  23. {mapfolding-0.9.1.dist-info → mapfolding-0.9.3.dist-info}/METADATA +4 -3
  24. mapfolding-0.9.3.dist-info/RECORD +51 -0
  25. {mapfolding-0.9.1.dist-info → mapfolding-0.9.3.dist-info}/WHEEL +1 -1
  26. tests/__init__.py +2 -2
  27. tests/conftest.py +7 -7
  28. tests/test_computations.py +15 -13
  29. tests/test_other.py +0 -7
  30. tests/test_tasks.py +2 -2
  31. mapfolding-0.9.1.dist-info/RECORD +0 -47
  32. /mapFolding/reference/{lunnanNumpy.py → lunnonNumpy.py} +0 -0
  33. /mapFolding/reference/{lunnanWhile.py → lunnonWhile.py} +0 -0
  34. {mapfolding-0.9.1.dist-info → mapfolding-0.9.3.dist-info}/entry_points.txt +0 -0
  35. {mapfolding-0.9.1.dist-info → mapfolding-0.9.3.dist-info}/licenses/LICENSE +0 -0
  36. {mapfolding-0.9.1.dist-info → mapfolding-0.9.3.dist-info}/top_level.txt +0 -0
@@ -119,7 +119,7 @@ def makeDictionaryFunctionDef(module: ast.Module) -> dict[ast_Identifier, ast.Fu
119
119
  A dictionary mapping function identifiers to their AST function definition nodes.
120
120
  """
121
121
  dictionaryIdentifier2FunctionDef: dict[ast_Identifier, ast.FunctionDef] = {}
122
- NodeTourist(lambda node: isinstance(node, ast.FunctionDef), Then.updateKeyValueIn(DOT.name, Then.extractIt, dictionaryIdentifier2FunctionDef)).visit(module) # type: ignore
122
+ NodeTourist(be.FunctionDef, Then.updateKeyValueIn(DOT.name, Then.extractIt, dictionaryIdentifier2FunctionDef)).visit(module)
123
123
  return dictionaryIdentifier2FunctionDef
124
124
 
125
125
  def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) -> ast.FunctionDef:
@@ -148,7 +148,7 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->
148
148
  raise ValueError(f"FunctionDefToInline not found in dictionaryIdentifier2FunctionDef: {identifierToInline = }") from ERRORmessage
149
149
 
150
150
  listIdentifiersCalledFunctions: list[ast_Identifier] = []
151
- findIdentifiersToInline = NodeTourist(ifThis.isCallToName, lambda node: Then.appendTo(listIdentifiersCalledFunctions)(DOT.id(DOT.func(node)))) # type: ignore
151
+ findIdentifiersToInline = NodeTourist(findThis = ifThis.isCallToName, doThat = grab.funcDOTidAttribute(Then.appendTo(listIdentifiersCalledFunctions)))
152
152
  findIdentifiersToInline.visit(FunctionDefToInline)
153
153
 
154
154
  dictionary4Inlining: dict[ast_Identifier, ast.FunctionDef] = {}
@@ -169,7 +169,8 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->
169
169
  if NodeTourist(ifThis.matchesMeButNotAnyDescendant(ifThis.isCall_Identifier(identifier)), Then.extractIt).captureLastMatch(module) is not None:
170
170
  FunctionDefTarget = dictionaryFunctionDef[identifier]
171
171
  if len(FunctionDefTarget.body) == 1:
172
- inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(FunctionDefTarget.body[0].value)) # type: ignore
172
+ replacement = NodeTourist(be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
173
+ inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(replacement))
173
174
  for astFunctionDef in dictionary4Inlining.values():
174
175
  inliner.visit(astFunctionDef)
175
176
  else:
@@ -179,7 +180,8 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->
179
180
 
180
181
  for identifier, FunctionDefTarget in dictionary4Inlining.items():
181
182
  if len(FunctionDefTarget.body) == 1:
182
- inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(FunctionDefTarget.body[0].value)) # type: ignore
183
+ replacement = NodeTourist(be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
184
+ inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(replacement))
183
185
  inliner.visit(FunctionDefToInline)
184
186
  else:
185
187
  inliner = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(identifier)),Then.replaceWith(FunctionDefTarget.body[0:-1]))
@@ -286,7 +288,7 @@ class DeReConstructField2ast:
286
288
  self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
287
289
  self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
288
290
 
289
- sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), Then.extractIt(DOT.annotation)).captureLastMatch(dataclassClassDef) # type: ignore
291
+ sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), Then.extractIt(DOT.annotation)).captureLastMatch(dataclassClassDef)
290
292
  if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
291
293
  else: self.astAnnotation = sherpa
292
294
 
@@ -422,7 +424,7 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]
422
424
  4. Optimizes imports using autoflake
423
425
  5. Writes the final source code to the specified file location
424
426
 
425
- This is typically the final step in the code generation pipeline,
427
+ This is typically the final step in the code generation assembly line,
426
428
  producing optimized Python modules ready for execution.
427
429
 
428
430
  Parameters:
@@ -441,62 +443,25 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]
441
443
  if packageName:
442
444
  autoflake_additional_imports.append(packageName)
443
445
  pythonSource = autoflake_fix_code(pythonSource, autoflake_additional_imports, expand_star_imports=False, remove_all_unused_imports=True, remove_duplicate_keys = False, remove_unused_variables = False)
444
- # pythonSource = python_minifier.minify(pythonSource)
446
+ # pythonSource = python_minifier.minify(pythonSource, remove_annotations=False, hoist_literals=False)
445
447
  writeStringToHere(pythonSource, pathFilename)
446
448
 
447
449
  # END of acceptable classes and functions ======================================================
448
- def removeUnusedParameters(ingredientsFunction: IngredientsFunction):
450
+ def removeUnusedParameters(ingredientsFunction: IngredientsFunction) -> IngredientsFunction:
449
451
  list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
450
452
  list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
451
453
  listName: list[ast.Name] = []
452
- NodeTourist(be.Name, Then.appendTo(listName)).visit(ingredientsFunction.astFunctionDef)
454
+ fauxFunctionDef = deepcopy(ingredientsFunction.astFunctionDef)
455
+ NodeChanger(be.Return, Then.removeIt).visit(fauxFunctionDef)
456
+ NodeTourist(be.Name, Then.appendTo(listName)).visit(fauxFunctionDef)
453
457
  list_Identifiers: list[ast_Identifier] = [astName.id for astName in listName]
454
458
  list_IdentifiersNotUsed: list[ast_Identifier] = list(set(list_arg_arg) - set(list_Identifiers))
455
-
456
- dictionaryEstimates: dict[tuple[int, ...], int] = {
457
- (2,2,2,2,2,2,2,2): 798148657152000,
458
- (2,21): 776374224866624,
459
- (3,15): 824761667826225,
460
- (3,3,3,3): 85109616000000000000000000000000,
461
- (8,8): 791274195985524900,
462
- }
463
-
464
- # END of marginal classes and functions ======================================================
465
- def Z0Z_lameFindReplace(astTree: 个, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> 个:
466
- """
467
- Recursively replace AST nodes based on a mapping of find-replace pairs.
468
-
469
- This function applies brute-force node replacement throughout an AST tree
470
- by comparing textual representations of nodes. While not the most efficient
471
- approach, it provides a reliable way to replace complex nested structures
472
- when more precise targeting methods are difficult to implement.
473
-
474
- The function continues replacing nodes until no more changes are detected
475
- in the AST's textual representation, ensuring complete replacement throughout
476
- the tree structure.
477
-
478
- Parameters:
479
- astTree: The AST structure to modify.
480
- mappingFindReplaceNodes: A mapping from source nodes to replacement nodes.
481
-
482
- Returns:
483
- The modified AST structure with all matching nodes replaced.
484
- """
485
- keepGoing = True
486
- newTree = deepcopy(astTree)
487
-
488
- while keepGoing:
489
- for nodeFind, nodeReplace in mappingFindReplaceNodes.items():
490
- NodeChanger(ifThis.Z0Z_unparseIs(nodeFind), Then.replaceWith(nodeReplace)).visit(newTree)
491
-
492
- if ast.unparse(newTree) == ast.unparse(astTree):
493
- keepGoing = False
494
- else:
495
- astTree = deepcopy(newTree)
496
- return newTree
459
+ for arg_Identifier in list_IdentifiersNotUsed:
460
+ remove_arg = NodeChanger(ifThis.is_arg_Identifier(arg_Identifier), Then.removeIt)
461
+ remove_arg.visit(ingredientsFunction.astFunctionDef)
462
+ return ingredientsFunction
497
463
 
498
464
  def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
499
- # TODO a tool to automatically remove unused variables from the ArgumentsSpecification (return, and returns) _might_ be nice.
500
465
  # Figure out dynamic flow control to synthesized modules https://github.com/hunterhogan/mapFolding/issues/4
501
466
  listAllIngredientsFunctions = [
502
467
  (ingredientsInitialize := astModuleToIngredientsFunction(recipeFlow.source_astModule, recipeFlow.sourceCallableInitialize)),
@@ -537,7 +502,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
537
502
  for ingredients in listAllIngredientsFunctions:
538
503
  for source_Identifier, recipe_Identifier in listFindReplace:
539
504
  updateName = NodeChanger(ifThis.isName_Identifier(source_Identifier) , grab.idAttribute(Then.replaceWith(recipe_Identifier)))
540
- update_arg = NodeChanger(ifThis.isArgument_Identifier(source_Identifier), grab.argAttribute(Then.replaceWith(recipe_Identifier)))
505
+ update_arg = NodeChanger(ifThis.isArgument_Identifier(source_Identifier), grab.argAttribute(Then.replaceWith(recipe_Identifier))) # type: ignore
541
506
  updateName.visit(ingredients.astFunctionDef)
542
507
  update_arg.visit(ingredients.astFunctionDef)
543
508
 
@@ -555,26 +520,27 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
555
520
  # Change callable parameters and Call to the callable at the same time ====
556
521
  # sequentialCallable =========================================================
557
522
  if recipeFlow.removeDataclassSequential:
558
- ingredientsSequential.astFunctionDef.args = Make.argumentsSpecification(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
559
- astCallSequentialCallable = Make.Call(Make.Name(recipeFlow.callableSequential), shatteredDataclass.listName4Parameters)
560
- changeReturnSequentialCallable = NodeChanger(be.Return, Then.replaceWith(Make.Return(shatteredDataclass.fragments4AssignmentOrParameters)))
561
- ingredientsSequential.astFunctionDef.returns = shatteredDataclass.signatureReturnAnnotation
562
- replaceAssignSequentialCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(recipeFlow.callableSequential)), Then.replaceWith(Make.Assign(listTargets=[shatteredDataclass.fragments4AssignmentOrParameters], value=astCallSequentialCallable)))
523
+ ingredientsSequential = removeDataclassFromFunction(ingredientsSequential, shatteredDataclass)
524
+ ingredientsDispatcher = unpackDataclassCallFunctionRepackDataclass(ingredientsDispatcher, recipeFlow.callableSequential, shatteredDataclass)
563
525
 
564
- unpack4sequentialCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(recipeFlow.callableSequential)), Then.insertThisAbove(shatteredDataclass.listUnpack))
565
- repack4sequentialCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(recipeFlow.callableSequential)), Then.insertThisBelow([shatteredDataclass.repack]))
566
-
567
- changeReturnSequentialCallable.visit(ingredientsSequential.astFunctionDef)
568
- replaceAssignSequentialCallable.visit(ingredientsDispatcher.astFunctionDef)
569
- unpack4sequentialCallable.visit(ingredientsDispatcher.astFunctionDef)
570
- repack4sequentialCallable.visit(ingredientsDispatcher.astFunctionDef)
571
-
572
- ingredientsSequential.astFunctionDef = Z0Z_lameFindReplace(ingredientsSequential.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
526
+ if recipeFlow.removeDataclassInitialize:
527
+ ingredientsInitialize = removeDataclassFromFunction(ingredientsInitialize, shatteredDataclass)
528
+ ingredientsDispatcher = unpackDataclassCallFunctionRepackDataclass(ingredientsDispatcher, recipeFlow.callableInitialize, shatteredDataclass)
573
529
 
574
530
  # parallelCallable =========================================================
575
531
  if recipeFlow.removeDataclassParallel:
576
532
  ingredientsParallel.astFunctionDef.args = Make.argumentsSpecification(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
577
- replaceCall2concurrencyManager = NodeChanger(ifThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier), Then.replaceWith(Make.Call(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier), listArguments=[Make.Name(recipeFlow.callableParallel)] + shatteredDataclass.listName4Parameters)))
533
+
534
+ ingredientsParallel.astFunctionDef = Z0Z_lameFindReplace(ingredientsParallel.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
535
+
536
+ ingredientsParallel = removeUnusedParameters(ingredientsParallel)
537
+
538
+ list_argCuzMyBrainRefusesToThink = ingredientsParallel.astFunctionDef.args.args + ingredientsParallel.astFunctionDef.args.posonlyargs + ingredientsParallel.astFunctionDef.args.kwonlyargs
539
+ list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
540
+
541
+ listParameters = [parameter for parameter in shatteredDataclass.listName4Parameters if parameter.id in list_arg_arg]
542
+
543
+ replaceCall2concurrencyManager = NodeChanger(ifThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier), Then.replaceWith(Make.Call(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier), listArguments=[Make.Name(recipeFlow.callableParallel)] + listParameters)))
578
544
 
579
545
  # NOTE I am dissatisfied with this logic for many reasons, including that it requires separate NodeCollector and NodeReplacer instances.
580
546
  astCallConcurrencyResult: list[ast.Call] = []
@@ -591,16 +557,74 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
591
557
  replaceCall2concurrencyManager.visit(ingredientsDispatcher.astFunctionDef)
592
558
  changeReturnParallelCallable.visit(ingredientsParallel.astFunctionDef)
593
559
 
594
- ingredientsParallel.astFunctionDef = Z0Z_lameFindReplace(ingredientsParallel.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
595
-
596
560
  # Module-level transformations ===========================================================
597
561
  ingredientsModuleNumbaUnified = IngredientsModule(ingredientsFunction=listAllIngredientsFunctions, imports=LedgerOfImports(recipeFlow.source_astModule))
598
562
  ingredientsModuleNumbaUnified.removeImportFromModule('numpy')
599
563
 
600
564
  return ingredientsModuleNumbaUnified
601
565
 
566
+ def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
567
+ ingredientsTarget.astFunctionDef.args = Make.argumentsSpecification(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
568
+ ingredientsTarget.astFunctionDef.returns = shatteredDataclass.signatureReturnAnnotation
569
+ changeReturnCallable = NodeChanger(be.Return, Then.replaceWith(Make.Return(shatteredDataclass.fragments4AssignmentOrParameters)))
570
+ changeReturnCallable.visit(ingredientsTarget.astFunctionDef)
571
+ ingredientsTarget.astFunctionDef = Z0Z_lameFindReplace(ingredientsTarget.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
572
+ return ingredientsTarget
573
+
574
+ def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: ast_Identifier, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
575
+ astCallTargetCallable = Make.Call(Make.Name(targetCallableIdentifier), shatteredDataclass.listName4Parameters)
576
+ replaceAssignTargetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign(listTargets=[shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
577
+ unpack4targetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
578
+ repack4targetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
579
+ replaceAssignTargetCallable.visit(ingredientsCaller.astFunctionDef)
580
+ unpack4targetCallable.visit(ingredientsCaller.astFunctionDef)
581
+ repack4targetCallable.visit(ingredientsCaller.astFunctionDef)
582
+ return ingredientsCaller
583
+
602
584
  def getIt(astCallConcurrencyResult: list[ast.Call]) -> Callable[[ast.AST], ast.AST]:
603
585
  def workhorse(node: ast.AST) -> ast.AST:
604
586
  NodeTourist(be.Call, Then.appendTo(astCallConcurrencyResult)).visit(node)
605
587
  return node
606
588
  return workhorse
589
+
590
+ dictionaryEstimates: dict[tuple[int, ...], int] = {
591
+ (2,2,2,2,2,2,2,2): 798148657152000,
592
+ (2,21): 776374224866624,
593
+ (3,15): 824761667826225,
594
+ (3,3,3,3): 85109616000000000000000000000000,
595
+ (8,8): 791274195985524900,
596
+ }
597
+
598
+ # END of marginal classes and functions ======================================================
599
+ def Z0Z_lameFindReplace(astTree: 个, mappingFindReplaceNodes: Mapping[ast.AST, ast.AST]) -> 个:
600
+ """
601
+ Recursively replace AST nodes based on a mapping of find-replace pairs.
602
+
603
+ This function applies brute-force node replacement throughout an AST tree
604
+ by comparing textual representations of nodes. While not the most efficient
605
+ approach, it provides a reliable way to replace complex nested structures
606
+ when more precise targeting methods are difficult to implement.
607
+
608
+ The function continues replacing nodes until no more changes are detected
609
+ in the AST's textual representation, ensuring complete replacement throughout
610
+ the tree structure.
611
+
612
+ Parameters:
613
+ astTree: The AST structure to modify.
614
+ mappingFindReplaceNodes: A mapping from source nodes to replacement nodes.
615
+
616
+ Returns:
617
+ The modified AST structure with all matching nodes replaced.
618
+ """
619
+ keepGoing = True
620
+ newTree = deepcopy(astTree)
621
+
622
+ while keepGoing:
623
+ for nodeFind, nodeReplace in mappingFindReplaceNodes.items():
624
+ NodeChanger(ifThis.Z0Z_unparseIs(nodeFind), Then.replaceWith(nodeReplace)).visit(newTree)
625
+
626
+ if ast.unparse(newTree) == ast.unparse(astTree):
627
+ keepGoing = False
628
+ else:
629
+ astTree = deepcopy(newTree)
630
+ return newTree
@@ -4,7 +4,7 @@ from mapFolding.theSSOT import Array1DElephino, Array1DFoldsTotal, Array1DLeaves
4
4
  from numba import jit
5
5
 
6
6
  def countInitialize(state: ComputationState) -> ComputationState:
7
- while state.leaf1ndex > 0:
7
+ while state.gap1ndex == 0:
8
8
  if state.leaf1ndex <= 1 or state.leafBelow[0] == 1:
9
9
  state.dimensionsUnconstrained = state.dimensionsTotal
10
10
  state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
@@ -42,12 +42,10 @@ def countInitialize(state: ComputationState) -> ComputationState:
42
42
  state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
43
43
  state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
44
44
  state.leaf1ndex += 1
45
- if state.gap1ndex > 0:
46
- break
47
45
  return state
48
46
 
49
47
  @jit(_nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
50
- def countParallel(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, concurrencyLimit: DatatypeElephino, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, foldsTotal: DatatypeFoldsTotal, gap1ndex: DatatypeLeavesTotal, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeElephino, taskIndex: DatatypeLeavesTotal) -> DatatypeFoldsTotal:
48
+ def countParallel(leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> DatatypeFoldsTotal:
51
49
  while leaf1ndex > 0:
52
50
  if leaf1ndex <= 1 or leafBelow[0] == 1:
53
51
  if leaf1ndex > leavesTotal:
@@ -92,7 +90,7 @@ def countParallel(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: Dataty
92
90
  return groupsOfFolds
93
91
 
94
92
  @jit(_nrt=True, boundscheck=False, cache=True, error_model='numpy', fastmath=True, forceinline=True, inline='always', looplift=False, no_cfunc_wrapper=False, no_cpython_wrapper=False, nopython=True, parallel=False)
95
- def countSequential(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, concurrencyLimit: DatatypeElephino, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, foldsTotal: DatatypeFoldsTotal, gap1ndex: DatatypeLeavesTotal, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeElephino, taskIndex: DatatypeLeavesTotal) -> tuple[tuple[DatatypeLeavesTotal, ...], DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, Array3D, DatatypeLeavesTotal, Array1DLeavesTotal, DatatypeLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array1DFoldsTotal, DatatypeFoldsTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal]:
93
+ def countSequential(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, concurrencyLimit: DatatypeElephino, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, foldGroups: Array1DFoldsTotal, foldsTotal: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, groupsOfFolds: DatatypeFoldsTotal, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> tuple[tuple[DatatypeLeavesTotal, ...], DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, Array3D, DatatypeLeavesTotal, Array1DLeavesTotal, DatatypeLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array1DFoldsTotal, DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal]:
96
94
  while leaf1ndex > 0:
97
95
  if leaf1ndex <= 1 or leafBelow[0] == 1:
98
96
  if leaf1ndex > leavesTotal:
@@ -124,7 +122,7 @@ def countSequential(mapShape: tuple[DatatypeLeavesTotal, ...], leavesTotal: Data
124
122
  leaf1ndex -= 1
125
123
  leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
126
124
  leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
127
- if groupsOfFolds and leaf1ndex == 3:
125
+ if leaf1ndex == 3 and groupsOfFolds:
128
126
  groupsOfFolds *= 2
129
127
  break
130
128
  if leaf1ndex > 0:
@@ -161,16 +159,16 @@ def doTheNeedful(state: ComputationState) -> ComputationState:
161
159
  leafBelow: Array1DLeavesTotal = state.leafBelow
162
160
  foldGroups: Array1DFoldsTotal = state.foldGroups
163
161
  foldsTotal: DatatypeFoldsTotal = state.foldsTotal
164
- gap1ndex: DatatypeLeavesTotal = state.gap1ndex
162
+ gap1ndex: DatatypeElephino = state.gap1ndex
165
163
  gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
166
164
  groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
167
165
  indexDimension: DatatypeLeavesTotal = state.indexDimension
168
166
  indexLeaf: DatatypeLeavesTotal = state.indexLeaf
169
167
  indexMiniGap: DatatypeElephino = state.indexMiniGap
170
168
  leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
171
- leafConnectee: DatatypeElephino = state.leafConnectee
169
+ leafConnectee: DatatypeLeavesTotal = state.leafConnectee
172
170
  taskIndex: DatatypeLeavesTotal = state.taskIndex
173
- dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
171
+ dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(countParallel, leavesTotal, taskDivisions, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
174
172
  for indexSherpa in range(stateParallel.taskDivisions):
175
173
  stateParallel.foldGroups[indexSherpa] = dictionaryConcurrency[indexSherpa].result()
176
174
  state = stateParallel
@@ -189,14 +187,14 @@ def doTheNeedful(state: ComputationState) -> ComputationState:
189
187
  leafBelow: Array1DLeavesTotal = state.leafBelow
190
188
  foldGroups: Array1DFoldsTotal = state.foldGroups
191
189
  foldsTotal: DatatypeFoldsTotal = state.foldsTotal
192
- gap1ndex: DatatypeLeavesTotal = state.gap1ndex
190
+ gap1ndex: DatatypeElephino = state.gap1ndex
193
191
  gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
194
192
  groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
195
193
  indexDimension: DatatypeLeavesTotal = state.indexDimension
196
194
  indexLeaf: DatatypeLeavesTotal = state.indexLeaf
197
195
  indexMiniGap: DatatypeElephino = state.indexMiniGap
198
196
  leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
199
- leafConnectee: DatatypeElephino = state.leafConnectee
197
+ leafConnectee: DatatypeLeavesTotal = state.leafConnectee
200
198
  taskIndex: DatatypeLeavesTotal = state.taskIndex
201
199
  mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex = countSequential(mapShape, leavesTotal, taskDivisions, concurrencyLimit, connectionGraph, dimensionsTotal, countDimensionsGapped, dimensionsUnconstrained, gapRangeStart, gapsWhere, leafAbove, leafBelow, foldGroups, foldsTotal, gap1ndex, gap1ndexCeiling, groupsOfFolds, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, taskIndex)
202
200
  state = ComputationState(mapShape=mapShape, leavesTotal=leavesTotal, taskDivisions=taskDivisions, concurrencyLimit=concurrencyLimit, countDimensionsGapped=countDimensionsGapped, dimensionsUnconstrained=dimensionsUnconstrained, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, foldGroups=foldGroups, foldsTotal=foldsTotal, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, groupsOfFolds=groupsOfFolds, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, taskIndex=taskIndex)
mapFolding/theDao.py CHANGED
@@ -1,23 +1,22 @@
1
1
  """
2
2
  Core computational algorithm for map folding counting and enumeration.
3
3
 
4
- This module implements the core algorithms for enumerating and counting the various ways
5
- a rectangular map can be folded. It uses a functional state-transformation approach, where
6
- each function performs a specific state mutation and returns the updated state. The module
7
- provides three main counting algorithms:
8
-
9
- 1. countInitialize: Sets up the initial state for computation
10
- 2. countSequential: Processes the folding computation sequentially
11
- 3. countParallel: Distributes the computation across multiple processes
12
-
13
- All algorithms operate on a ComputationState object that tracks the folding process, including:
14
- - A "leaf" is a unit square in the map
15
- - A "gap" is a potential position where a new leaf can be folded
16
- - Connections track how leaves can connect above/below each other
17
- - Leaves are enumerated starting from 1, not 0; hence, leaf1ndex not leafIndex
18
-
19
- The doTheNeedful function is the main entry point that orchestrates the computation strategy
20
- based on task divisions and concurrency parameters.
4
+ This module implements the core algorithms for enumerating and counting the various ways a rectangular map can be
5
+ folded. It uses a functional state-transformation approach, where each function performs a specific state mutation and
6
+ returns the updated state. The module provides three main counting algorithms:
7
+
8
+ 1. `countInitialize`: Sets up the initial state for computation.
9
+ 2. `countSequential`: Processes the folding computation sequentially.
10
+ 3. `countParallel`: Distributes the computation across multiple processes.
11
+
12
+ All algorithms operate on a `ComputationState` object that tracks the folding process, including:
13
+ - A "leaf" is a unit square in the map.
14
+ - A "gap" is a potential position where a new leaf can be folded.
15
+ - Connections track how leaves can connect above/below each other.
16
+ - Leaves are enumerated starting from 1, not 0; hence, `leaf1ndex` not `leafIndex`.
17
+
18
+ The `doTheNeedful` function is the main entry point that orchestrates the computation strategy based on task divisions and
19
+ concurrency parameters.
21
20
  """
22
21
  from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
23
22
  from copy import deepcopy
@@ -144,7 +143,7 @@ def updateLeafConnectee(state: ComputationState) -> ComputationState:
144
143
  return state
145
144
 
146
145
  def countInitialize(state: ComputationState) -> ComputationState:
147
- while activeLeafGreaterThan0(state):
146
+ while state.gap1ndex == 0:
148
147
  if activeLeafIsTheFirstLeaf(state) or leafBelowSentinelIs1(state):
149
148
  state = initializeVariablesToFindGaps(state)
150
149
  while loopUpToDimensionsTotal(state):
@@ -164,8 +163,6 @@ def countInitialize(state: ComputationState) -> ComputationState:
164
163
  state = incrementIndexMiniGap(state)
165
164
  if thereIsAnActiveLeaf(state):
166
165
  state = insertLeafAtGap(state)
167
- if state.gap1ndex > 0:
168
- break
169
166
  return state
170
167
 
171
168
  def countParallel(state: ComputationState) -> ComputationState:
@@ -218,8 +215,9 @@ def countSequential(state: ComputationState) -> ComputationState:
218
215
  state = incrementIndexMiniGap(state)
219
216
  while noGapsHere(state):
220
217
  state = undoLastLeafPlacement(state)
221
- if state.groupsOfFolds and state.leaf1ndex == 3:
218
+ if state.leaf1ndex == 3 and state.groupsOfFolds:
222
219
  state.groupsOfFolds *= 2
220
+ # print('break')
223
221
  break
224
222
  if thereIsAnActiveLeaf(state):
225
223
  state = insertLeafAtGap(state)
@@ -0,0 +1,142 @@
1
+ from mapFolding.dataBaskets import MapFoldingState
2
+
3
def activeLeafGreaterThan0(state: MapFoldingState) -> bool:
    """Return True while an active leaf remains to be processed (leaves are 1-indexed)."""
    return 0 < state.leaf1ndex
5
+
6
def activeLeafGreaterThanLeavesTotal(state: MapFoldingState) -> bool:
    """Return True when the active leaf index has moved past the last leaf."""
    return state.leavesTotal < state.leaf1ndex
8
+
9
def activeLeafIsTheFirstLeaf(state: MapFoldingState) -> bool:
    """Return True when the active leaf is leaf 1 (or the pre-start index 0)."""
    return state.leaf1ndex < 2
11
+
12
def activeLeafIsUnconstrainedInAllDimensions(state: MapFoldingState) -> bool:
    """Return True when no dimension constrains the active leaf (counter exhausted)."""
    return state.dimensionsUnconstrained == 0
14
+
15
def activeLeafUnconstrainedInThisDimension(state: MapFoldingState) -> MapFoldingState:
    """Record that the current dimension places no constraint on the active leaf."""
    state.dimensionsUnconstrained = state.dimensionsUnconstrained - 1
    return state
18
+
19
def filterCommonGaps(state: MapFoldingState) -> MapFoldingState:
    """Compact candidate gaps: keep one only if gapped in every unconstrained dimension."""
    candidateLeaf = state.gapsWhere[state.indexMiniGap]
    state.gapsWhere[state.gap1ndex] = candidateLeaf
    if state.countDimensionsGapped[candidateLeaf] == state.dimensionsUnconstrained:
        # Candidate survives the filter: advance the active-gap cursor past it.
        state = incrementActiveGap(state)
    # Clear the per-leaf tally so the next round of gap discovery starts fresh.
    state.countDimensionsGapped[candidateLeaf] = 0
    return state
25
+
26
def gapAvailable(state: MapFoldingState) -> bool:
    """Return True when an active leaf exists to be placed at a gap."""
    return state.leaf1ndex >= 1
28
+
29
def incrementActiveGap(state: MapFoldingState) -> MapFoldingState:
    """Advance the cursor marking the next active gap slot."""
    state.gap1ndex = state.gap1ndex + 1
    return state
32
+
33
def incrementGap1ndexCeiling(state: MapFoldingState) -> MapFoldingState:
    """Raise the upper bound of the candidate-gap range by one slot."""
    state.gap1ndexCeiling = state.gap1ndexCeiling + 1
    return state
36
+
37
def incrementIndexMiniGap(state: MapFoldingState) -> MapFoldingState:
    """Step the gap-filtering scan cursor forward by one."""
    state.indexMiniGap = state.indexMiniGap + 1
    return state
40
+
41
def initializeIndexMiniGap(state: MapFoldingState) -> MapFoldingState:
    """Start the gap-filtering scan at the current active-gap cursor."""
    state.indexMiniGap = state.gap1ndex
    return state
44
+
45
def initializeVariablesToFindGaps(state: MapFoldingState) -> MapFoldingState:
    """Reset the per-leaf cursors before scanning the dimensions for gaps."""
    state.indexDimension = 0
    # Every dimension is presumed unconstrained until a connectee proves otherwise.
    state.dimensionsUnconstrained = state.dimensionsTotal
    # Gap candidates for this leaf begin where the previous leaf's range ended.
    state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
    return state
50
+
51
def insertActiveLeaf(state: MapFoldingState) -> MapFoldingState:
    """Register every leaf index below the active leaf as a potential gap position."""
    state.indexLeaf = 0
    while state.indexLeaf < state.leaf1ndex:
        # Record this leaf as a gap candidate, then advance both cursors.
        state.gapsWhere[state.gap1ndexCeiling] = state.indexLeaf
        state.gap1ndexCeiling = state.gap1ndexCeiling + 1
        state.indexLeaf = state.indexLeaf + 1
    return state
58
+
59
def insertActiveLeafAtGap(state: MapFoldingState) -> MapFoldingState:
    """Splice the active leaf into the folding loop at the most recent gap."""
    # Consume the newest gap candidate.
    state.gap1ndex -= 1
    newNeighborAbove = state.gapsWhere[state.gap1ndex]
    # Link the active leaf between its neighbor above and that neighbor's old below.
    state.leafAbove[state.leaf1ndex] = newNeighborAbove
    state.leafBelow[state.leaf1ndex] = state.leafBelow[newNeighborAbove]
    state.leafBelow[newNeighborAbove] = state.leaf1ndex
    state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
    # Remember where this leaf's gap range began, then advance to the next leaf.
    state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
    state.leaf1ndex += 1
    return state
68
+
69
def leafBelowSentinelIs1(state: MapFoldingState) -> bool:
    """Return True when leaf 1 sits directly below the sentinel (index 0)."""
    return 1 == state.leafBelow[0]
71
+
72
def leafConnecteeIsActiveLeaf(state: MapFoldingState) -> bool:
    """Return True when the walk around connected leaves has returned to the active leaf."""
    return state.leaf1ndex == state.leafConnectee
74
+
75
def lookForGaps(state: MapFoldingState) -> MapFoldingState:
    """Record the current connectee as a gap candidate and tally this dimension."""
    connectee = state.leafConnectee
    state.gapsWhere[state.gap1ndexCeiling] = connectee
    if state.countDimensionsGapped[connectee] == 0:
        # First dimension to propose this leaf: reserve the candidate slot.
        state = incrementGap1ndexCeiling(state)
    state.countDimensionsGapped[connectee] = state.countDimensionsGapped[connectee] + 1
    return state
81
+
82
def lookupLeafConnecteeInConnectionGraph(state: MapFoldingState) -> MapFoldingState:
    """Begin the connected-leaf walk at the active leaf's own graph entry."""
    activeLeaf = state.leaf1ndex
    state.leafConnectee = state.connectionGraph[state.indexDimension, activeLeaf, activeLeaf]
    return state
85
+
86
def loopingLeavesConnectedToActiveLeaf(state: MapFoldingState) -> bool:
    """Return True while the connected-leaf walk has not circled back to the active leaf."""
    return not (state.leafConnectee == state.leaf1ndex)
88
+
89
def loopingThroughTheDimensions(state: MapFoldingState) -> bool:
    """Return True while dimensions remain to be scanned for the active leaf."""
    return state.dimensionsTotal > state.indexDimension
91
+
92
def loopingToActiveGapCeiling(state: MapFoldingState) -> bool:
    """Return True while the filtering scan has not reached the candidate-gap ceiling."""
    return state.gap1ndexCeiling > state.indexMiniGap
94
+
95
def noGapsHere(state: MapFoldingState) -> bool:
    """Return True when the active leaf exists but has no remaining gap to occupy."""
    if state.leaf1ndex <= 0:
        return False
    # No gaps remain when the cursor has fallen back to where this leaf's range began.
    return state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]
97
+
98
def tryAnotherLeafConnectee(state: MapFoldingState) -> MapFoldingState:
    """Step the connected-leaf walk to the next candidate in this dimension."""
    nextBelow = state.leafBelow[state.leafConnectee]
    state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, nextBelow]
    return state
101
+
102
def tryNextDimension(state: MapFoldingState) -> MapFoldingState:
    """Move the dimension scan forward by one dimension."""
    state.indexDimension = state.indexDimension + 1
    return state
105
+
106
def undoLastLeafPlacement(state: MapFoldingState) -> MapFoldingState:
    """Backtrack: unsplice the most recently placed leaf from the folding loop."""
    state.leaf1ndex -= 1
    removedLeaf = state.leaf1ndex
    neighborAbove = state.leafAbove[removedLeaf]
    neighborBelow = state.leafBelow[removedLeaf]
    # Reconnect the removed leaf's neighbors directly to each other.
    state.leafBelow[neighborAbove] = neighborBelow
    state.leafAbove[neighborBelow] = neighborAbove
    return state
111
+
112
def count(state: MapFoldingState) -> MapFoldingState:
    """Depth-first enumeration of map foldings, accumulating `state.groupsOfFolds`.

    While an active leaf exists: if every leaf is placed, count one completed
    folding; otherwise discover the gaps where the active leaf may be inserted,
    backtrack while no gaps remain, and place the active leaf at the newest gap.
    The statement order mirrors the chain of small state-transition helpers and
    must not be rearranged.
    """
    while activeLeafGreaterThan0(state):
        if activeLeafIsTheFirstLeaf(state) or leafBelowSentinelIs1(state):
            if activeLeafGreaterThanLeavesTotal(state):
                # Every leaf is placed: record one completed group of folds.
                state.groupsOfFolds += 1
            else:
                # Scan each dimension for leaves the active leaf may sit against.
                state = initializeVariablesToFindGaps(state)
                while loopingThroughTheDimensions(state):
                    state = lookupLeafConnecteeInConnectionGraph(state)
                    if leafConnecteeIsActiveLeaf(state):
                        # The graph walk starts and ends at the active leaf:
                        # this dimension imposes no constraint.
                        state = activeLeafUnconstrainedInThisDimension(state)
                    else:
                        # Walk the leaves connected to the active leaf,
                        # recording each as a candidate gap.
                        while loopingLeavesConnectedToActiveLeaf(state):
                            state = lookForGaps(state)
                            state = tryAnotherLeafConnectee(state)
                    state = tryNextDimension(state)
                if activeLeafIsUnconstrainedInAllDimensions(state):
                    # Unconstrained everywhere: every lower leaf index is a gap.
                    state = insertActiveLeaf(state)
                # Keep only the gaps common to all constrained dimensions.
                state = initializeIndexMiniGap(state)
                while loopingToActiveGapCeiling(state):
                    state = filterCommonGaps(state)
                    state = incrementIndexMiniGap(state)
        # Backtrack while the active leaf has no gap left to occupy.
        while noGapsHere(state):
            state = undoLastLeafPlacement(state)
        if gapAvailable(state):
            state = insertActiveLeafAtGap(state)
    return state
139
+
140
def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
    """Entry point: run the fold-counting loop to completion and return the final state."""
    return count(state)