mapFolding 0.16.2__py3-none-any.whl → 0.16.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. easyRun/NOTcountingFolds.py +6 -5
  2. easyRun/countFolds.py +1 -1
  3. easyRun/generateAllModules.py +14 -0
  4. easyRun/meanders.py +1 -1
  5. mapFolding/__init__.py +1 -0
  6. mapFolding/_theSSOT.py +3 -2
  7. mapFolding/_theTypes.py +3 -0
  8. mapFolding/algorithms/A086345.py +75 -0
  9. mapFolding/algorithms/oeisIDbyFormula.py +2 -2
  10. mapFolding/algorithms/symmetricFolds.py +36 -0
  11. mapFolding/basecamp.py +80 -149
  12. mapFolding/dataBaskets.py +123 -5
  13. mapFolding/filesystemToolkit.py +4 -32
  14. mapFolding/oeis.py +5 -12
  15. mapFolding/reference/A086345Wu.py +25 -0
  16. mapFolding/reference/matrixMeandersAnalysis/signatures.py +3 -0
  17. mapFolding/someAssemblyRequired/A007822/A007822rawMaterials.py +10 -45
  18. mapFolding/someAssemblyRequired/A007822/_asynchronousAnnex.py +51 -0
  19. mapFolding/someAssemblyRequired/A007822/makeA007822AsynchronousModules.py +36 -195
  20. mapFolding/someAssemblyRequired/A007822/makeA007822Modules.py +42 -44
  21. mapFolding/someAssemblyRequired/RecipeJob.py +78 -18
  22. mapFolding/someAssemblyRequired/__init__.py +3 -8
  23. mapFolding/someAssemblyRequired/_toolkitContainers.py +32 -3
  24. mapFolding/someAssemblyRequired/infoBooth.py +40 -23
  25. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +74 -153
  26. mapFolding/someAssemblyRequired/makeJobTheorem2codon.py +56 -88
  27. mapFolding/someAssemblyRequired/makingModules_count.py +10 -12
  28. mapFolding/someAssemblyRequired/makingModules_doTheNeedful.py +6 -68
  29. mapFolding/someAssemblyRequired/{mapFolding → mapFoldingModules}/makeMapFoldingModules.py +24 -30
  30. mapFolding/someAssemblyRequired/meanders/makeMeandersModules.py +8 -6
  31. mapFolding/someAssemblyRequired/toolkitMakeModules.py +2 -2
  32. mapFolding/someAssemblyRequired/toolkitNumba.py +1 -1
  33. mapFolding/someAssemblyRequired/transformationTools.py +10 -12
  34. mapFolding/syntheticModules/A007822/algorithm.py +45 -50
  35. mapFolding/syntheticModules/A007822/asynchronous.py +91 -34
  36. mapFolding/syntheticModules/A007822/initializeState.py +15 -21
  37. mapFolding/syntheticModules/A007822/theorem2.py +16 -22
  38. mapFolding/syntheticModules/A007822/theorem2Numba.py +20 -26
  39. mapFolding/syntheticModules/A007822/theorem2Trimmed.py +17 -23
  40. mapFolding/syntheticModules/countParallelNumba.py +3 -7
  41. mapFolding/syntheticModules/daoOfMapFoldingNumba.py +1 -2
  42. mapFolding/syntheticModules/meanders/bigInt.py +9 -9
  43. mapFolding/syntheticModules/theorem2Numba.py +28 -9
  44. mapFolding/syntheticModules/theorem2Trimmed.py +1 -1
  45. mapFolding/tests/test_computations.py +1 -1
  46. {mapfolding-0.16.2.dist-info → mapfolding-0.16.4.dist-info}/METADATA +4 -1
  47. {mapfolding-0.16.2.dist-info → mapfolding-0.16.4.dist-info}/RECORD +52 -61
  48. mapFolding/_dataPacking.py +0 -68
  49. mapFolding/reference/meandersDumpingGround/A005316intOptimized.py +0 -122
  50. mapFolding/reference/meandersDumpingGround/A005316optimized128bit.py +0 -79
  51. mapFolding/reference/meandersDumpingGround/matrixMeandersBaseline.py +0 -65
  52. mapFolding/reference/meandersDumpingGround/matrixMeandersBaselineAnnex.py +0 -84
  53. mapFolding/reference/meandersDumpingGround/matrixMeandersSimpleQueue.py +0 -90
  54. mapFolding/syntheticModules/A007822/algorithmNumba.py +0 -94
  55. mapFolding/syntheticModules/A007822/asynchronousAnnex.py +0 -66
  56. mapFolding/syntheticModules/A007822/asynchronousAnnexNumba.py +0 -70
  57. mapFolding/syntheticModules/A007822/asynchronousNumba.py +0 -79
  58. mapFolding/syntheticModules/A007822/asynchronousTheorem2.py +0 -65
  59. mapFolding/syntheticModules/A007822/asynchronousTrimmed.py +0 -56
  60. mapFolding/syntheticModules/dataPacking.py +0 -26
  61. mapFolding/syntheticModules/dataPackingA007822.py +0 -92
  62. /mapFolding/someAssemblyRequired/{mapFolding → mapFoldingModules}/__init__.py +0 -0
  63. {mapfolding-0.16.2.dist-info → mapfolding-0.16.4.dist-info}/WHEEL +0 -0
  64. {mapfolding-0.16.2.dist-info → mapfolding-0.16.4.dist-info}/entry_points.txt +0 -0
  65. {mapfolding-0.16.2.dist-info → mapfolding-0.16.4.dist-info}/licenses/LICENSE +0 -0
  66. {mapfolding-0.16.2.dist-info → mapfolding-0.16.4.dist-info}/top_level.txt +0 -0
mapFolding/someAssemblyRequired/_toolkitContainers.py

@@ -30,7 +30,7 @@ from collections.abc import Callable
  from copy import deepcopy
  from hunterMakesPy import raiseIfNone
  from mapFolding.someAssemblyRequired import IfThis
- from typing import Any, cast
+ from typing import Any, cast, NamedTuple
  import ast
  import dataclasses

@@ -39,7 +39,7 @@ dummySubscript = Make.Subscript(Make.Name("dummy"), Make.Name("slice"))
  dummyTuple = Make.Tuple([Make.Name("dummyElement")])

  @dataclasses.dataclass
- class ShatteredDataclass:
+ class ShatteredDataclass: # slots?
  """Container for decomposed dataclass components organized as AST nodes for code generation.

  This class holds the decomposed representation of a dataclass, breaking it down into individual
@@ -80,6 +80,9 @@ class ShatteredDataclass:
  list_keyword_field__field4init: list[ast.keyword] = dataclasses.field(default_factory=list[ast.keyword])
  """Keyword arguments for dataclass initialization using field=field format."""

+ listIdentifiersStaticScalars: list[str] = dataclasses.field(default_factory=list[str])
+ """Identifiers of unchanging scalar fields with `init=False`; mutually exclusive with `list_keyword_field__field4init`."""
+
  listAnnotations: list[ast.expr] = dataclasses.field(default_factory=list[ast.expr])
  """Type annotations for each dataclass field in declaration order."""

@@ -99,7 +102,7 @@ class ShatteredDataclass:
  """Tuple-based return type annotation for functions returning decomposed field values."""

  @dataclasses.dataclass
- class DeReConstructField2ast:
+ class DeReConstructField2ast: # slots?
  """
  Transform a dataclass field into AST node representations for code generation.

@@ -234,6 +237,8 @@ class DeReConstructField2ast:
  dtype_asnameName: ast.Name = self.astAnnotation
  if dtype_asnameName.id == 'Array3DLeavesTotal':
  axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8'), Make.Name('uint8')]))
+ if dtype_asnameName.id == 'Array2DLeavesTotal':
+ axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8')]))
  ast_expr = Make.Subscript(Make.Name(annotationType), Make.Tuple([axesSubscript, Make.Subscript(Make.Name('dtype'), dtype_asnameName)]))
  constructor = 'array'
  self.ledger.addImportFrom_asStr(moduleWithLogicalPath, constructor)
@@ -253,3 +258,27 @@ class DeReConstructField2ast:
  self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
  if isinstance(self.astAnnotation, ast.Name):
  self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id)
+
+ class DatatypeConfiguration(NamedTuple):
+ """Configuration for mapping framework datatypes to compiled datatypes.
+
+ This configuration class defines how abstract datatypes used in the map folding framework should be replaced with compiled
+ datatypes during code generation. Each configuration specifies the source module, target type name, and optional import alias
+ for the transformation.
+
+ Attributes
+ ----------
+ datatypeIdentifier : str
+ Framework datatype identifier to be replaced.
+ typeModule : identifierDotAttribute
+ Module containing the target datatype (e.g., 'codon', 'numpy').
+ typeIdentifier : str
+ Concrete type name in the target module.
+ type_asname : str | None = None
+ Optional import alias for the type.
+ """
+
+ datatypeIdentifier: str
+ typeModule: identifierDotAttribute
+ typeIdentifier: str
+ type_asname: str | None = None
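
The new `DatatypeConfiguration` NamedTuple is shared by the job generators (see `makeJobTheorem2Numba.py` below). A minimal sketch of how an entry is built and the import it describes, assuming the re-export from `mapFolding.someAssemblyRequired` that the import lines below show; the rendered import string mirrors the `addImportFrom_asStr` pattern in the code it replaces:

    from mapFolding.someAssemblyRequired import DatatypeConfiguration

    # Mirror one entry of listDatatypeConfigurations: map the framework's abstract
    # DatatypeLeavesTotal onto numba's uint8, keeping the framework name as the alias.
    configurationLeavesTotal = DatatypeConfiguration(
        datatypeIdentifier='DatatypeLeavesTotal',
        typeModule='numba',
        typeIdentifier='uint8',
        type_asname='DatatypeLeavesTotal',
    )

    # A code generator can render this entry as an import statement in the synthesized module.
    print(f"from {configurationLeavesTotal.typeModule} import "
          f"{configurationLeavesTotal.typeIdentifier} as {configurationLeavesTotal.type_asname}")
    # -> from numba import uint8 as DatatypeLeavesTotal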
mapFolding/someAssemblyRequired/infoBooth.py

@@ -11,39 +11,56 @@ on empirical measurements and theoretical analysis of map folding algorithms for
  specific dimensional configurations.
  """

+ from copy import deepcopy
  from hunterMakesPy import identifierDotAttribute
- from typing import Final
+ from typing import Final, TypedDict

  dictionaryEstimatesMapFolding: Final[dict[tuple[int, ...], int]] = {
  (2,2,2,2,2,2,2,2): 798148657152000,
  (2,21): 776374224866624,
  (3,15): 824761667826225,
  (3,3,3,3): 85109616000000000000000000000000,
- (8,8): 791274195985524900,
+ (8,8): 791274195985524900, # A test estimated 300,000 hours to compute.
  }
  """Estimates of multidimensional map folding `foldsTotal`."""

- identifierCallableSourceDEFAULT: Final[str] = 'count'
- """Default identifier for the core counting function in algorithms."""
+ class Default(TypedDict):
+ """Default identifiers."""

- identifierCallableSourceDispatcherDEFAULT: Final[str] = 'doTheNeedful'
- """Default identifier for dispatcher functions that route computational tasks."""
+ function: dict[str, str]
+ logicalPath: dict[str, identifierDotAttribute]
+ module: dict[str, str]
+ variable: dict[str, str]

- identifierCountingDEFAULT: Final[str] = 'groupsOfFolds'
- """Default identifier for the primary counting variable in map folding computations."""
-
- identifierDataclassInstanceDEFAULT: Final[str] = 'state'
- """Default variable name for dataclass instances in generated code."""
-
- identifierModuleDataPackingDEFAULT: Final[str] = 'dataPacking'
- """Default identifier for modules containing data packing and unpacking functions."""
-
- identifierModuleSourceAlgorithmDEFAULT: Final[str] = 'daoOfMapFolding'
- """Default identifier for the algorithm source module containing the base implementation."""
-
- logicalPathInfixAlgorithmDEFAULT: Final[identifierDotAttribute] = 'algorithms'
- """Default logical path component for handmade algorithms."""
-
- logicalPathInfixDEFAULT: Final[identifierDotAttribute] = 'syntheticModules'
- """Default logical path component for organizing synthetic generated modules."""
+ default = Default(
+ function = {
+ 'counting': 'count',
+ 'dispatcher': 'doTheNeedful',
+ 'initializeState': 'transitionOnGroupsOfFolds',
+ },
+ logicalPath = {
+ 'algorithm': 'algorithms',
+ 'synthetic': 'syntheticModules',
+ },
+ module = {
+ 'algorithm': 'daoOfMapFolding',
+ 'initializeState': 'initializeState',
+ },
+ variable = {
+ 'counting': 'groupsOfFolds',
+ 'stateDataclass': 'MapFoldingState',
+ 'stateInstance': 'state',
+ },
+ )

+ defaultA007822: Default = deepcopy(default)
+ defaultA007822['function']['_processCompletedFutures'] = '_processCompletedFutures'
+ defaultA007822['function']['filterAsymmetricFolds'] = 'filterAsymmetricFolds'
+ defaultA007822['function']['getSymmetricFoldsTotal'] = 'getSymmetricFoldsTotal'
+ defaultA007822['function']['initializeConcurrencyManager'] = 'initializeConcurrencyManager'
+ defaultA007822['logicalPath']['assembly'] = 'someAssemblyRequired.A007822'
+ defaultA007822['logicalPath']['synthetic'] += '.A007822'
+ defaultA007822['module']['algorithm'] = 'algorithm'
+ defaultA007822['module']['asynchronous'] = 'asynchronous'
+ # defaultA007822['variable']['counting'] = 'symmetricFolds'
+ defaultA007822['variable']['stateDataclass'] = 'SymmetricFoldsState'
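
The scattered `*DEFAULT` constants are consolidated into one nested `Default` TypedDict, and the A007822 variant is derived from it by deep copy. A minimal, self-contained sketch of that pattern (with `identifierDotAttribute` narrowed to `str` and only a few of the keys) showing why the copy must be deep: with a shallow copy, the `+=` on the nested dict would leak back into `default`.

    from copy import deepcopy
    from typing import TypedDict

    class Default(TypedDict):
        function: dict[str, str]
        logicalPath: dict[str, str]
        module: dict[str, str]
        variable: dict[str, str]

    default = Default(
        function={'counting': 'count'},
        logicalPath={'synthetic': 'syntheticModules'},
        module={'algorithm': 'daoOfMapFolding'},
        variable={'stateDataclass': 'MapFoldingState'},
    )

    # Deep copy, then override only what A007822 needs.
    defaultA007822: Default = deepcopy(default)
    defaultA007822['logicalPath']['synthetic'] += '.A007822'
    defaultA007822['variable']['stateDataclass'] = 'SymmetricFoldsState'

    assert default['logicalPath']['synthetic'] == 'syntheticModules'                # untouched
    assert defaultA007822['logicalPath']['synthetic'] == 'syntheticModules.A007822'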
mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py

@@ -1,70 +1,62 @@
  """
  Map folding AST transformation system: Specialized job generation and optimization implementation.

- This module implements the specialized job generation layer of the map folding AST transformation
- system, executing the complete transformation process to convert generic map folding algorithms
- into highly optimized, standalone computation modules. Building upon the configuration orchestration
- established in the recipe system, this module applies the full sequence of transformations from
- pattern recognition through Numba compilation to produce self-contained computational solutions
- optimized for specific map dimensions and calculation contexts.
-
- The transformation implementation addresses the computational demands of map folding research where
- calculations can require hours or days to complete. The specialization process converts abstract
- algorithms with flexible parameters into concrete, statically-optimized code that leverages
- just-in-time compilation for maximum performance. Each generated module targets specific map
- shapes and calculation modes, enabling aggressive compiler optimizations based on known constraints
- and embedded constants.
-
- The optimization process executes systematic transformations including static value embedding to
- replace parameterized values with compile-time constants, dead code elimination to remove unused
- variables and code paths, parameter internalization to convert function parameters into embedded
- variables, import optimization to replace generic imports with specific implementations, Numba
- decoration with appropriate compilation directives, progress integration for long-running calculations,
- and launcher generation for standalone execution entry points.
-
- The resulting modules represent the culmination of the entire AST transformation system, producing
- self-contained Python scripts that execute independently with dramatically improved performance
- compared to original generic algorithms while maintaining mathematical correctness and providing
- essential progress feedback capabilities for large-scale computational research.
+ Each generated module targets a specific map shape and calculation mode.
+
+ The optimization process executes systematic transformations including static value embedding, dead code elimination, parameter
+ internalization to convert function parameters into embedded variables, Numba decoration with appropriate compilation directives,
+ progress integration for long-running calculations, and launcher generation for standalone execution entry points.
  """

  from astToolkit import (
- Be, extractFunctionDef, identifierDotAttribute, IngredientsFunction, IngredientsModule, LedgerOfImports, Make,
- NodeChanger, NodeTourist, Then)
+ Be, extractFunctionDef, IngredientsFunction, IngredientsModule, Make, NodeChanger, NodeTourist, Then)
  from astToolkit.transformationTools import write_astModule
  from hunterMakesPy import autoDecodingRLE, raiseIfNone
- from mapFolding import getPathFilenameFoldsTotal, packageSettings
+ from mapFolding import DatatypeLeavesTotal, getFoldsTotalKnown, getPathFilenameFoldsTotal, packageSettings
  from mapFolding.dataBaskets import MapFoldingState
- from mapFolding.someAssemblyRequired import IfThis
- from mapFolding.someAssemblyRequired.RecipeJob import RecipeJobTheorem2
+ from mapFolding.someAssemblyRequired import DatatypeConfiguration, dictionaryEstimatesMapFolding, IfThis
+ from mapFolding.someAssemblyRequired.RecipeJob import customizeDatatypeViaImport, RecipeJobTheorem2
  from mapFolding.someAssemblyRequired.toolkitNumba import decorateCallableWithNumba, parametersNumbaLight, SpicesJobNumba
  from mapFolding.syntheticModules.initializeState import transitionOnGroupsOfFolds
  from pathlib import PurePosixPath
- from typing import cast, NamedTuple, TYPE_CHECKING
- from typing_extensions import TypeIs
+ from typing import cast
  import ast

- if TYPE_CHECKING:
- from collections.abc import Callable
+ # TODO More convergence with `makeJobTheorem2codon`
+
+ # TODO Dynamically calculate the bitwidth of each datatype. NOTE I've delayed dynamic calculation because I don't know how to
+ # calculate what 'elephino' needs. But perhaps I can dynamically calculate 'leavesTotal' and 'foldsTotal' and hardcode 'elephino.'
+ # That would probably be an improvement.
+ listDatatypeConfigurations: list[DatatypeConfiguration] = [
+ DatatypeConfiguration(datatypeIdentifier='DatatypeLeavesTotal', typeModule='numba', typeIdentifier='uint8', type_asname='DatatypeLeavesTotal'),
+ DatatypeConfiguration(datatypeIdentifier='DatatypeElephino', typeModule='numba', typeIdentifier='uint16', type_asname='DatatypeElephino'),
+ DatatypeConfiguration(datatypeIdentifier='DatatypeFoldsTotal', typeModule='numba', typeIdentifier='uint64', type_asname='DatatypeFoldsTotal'),
+ DatatypeConfiguration(datatypeIdentifier='Array1DLeavesTotal', typeModule='numpy', typeIdentifier='uint8', type_asname='Array1DLeavesTotal'),
+ DatatypeConfiguration(datatypeIdentifier='Array1DElephino', typeModule='numpy', typeIdentifier='uint16', type_asname='Array1DElephino'),
+ DatatypeConfiguration(datatypeIdentifier='Array3DLeavesTotal', typeModule='numpy', typeIdentifier='uint8', type_asname='Array3DLeavesTotal'),
+ ]
+
+ def addLauncher(ingredientsModule: IngredientsModule, ingredientsCount: IngredientsFunction, job: RecipeJobTheorem2) -> tuple[IngredientsModule, IngredientsFunction]:
+ """Add a standalone launcher section to a computation module."""
+ linesLaunch: str = f"""
+ if __name__ == '__main__':
+ import time
+ timeStart = time.perf_counter()
+ foldsTotal = int({job.countCallable}() * {job.state.leavesTotal})
+ print(time.perf_counter() - timeStart)
+ print('\\nmap {job.state.mapShape} =', foldsTotal)
+ writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
+ writeStream.write(str(foldsTotal))
+ writeStream.close()
+ """
+ ingredientsModule.appendLauncher(ast.parse(linesLaunch))
+ NodeChanger(Be.Return, Then.replaceWith(Make.Return(job.shatteredDataclass.countingVariableName))).visit(ingredientsCount.astFunctionDef)
+ ingredientsCount.astFunctionDef.returns = job.shatteredDataclass.countingVariableAnnotation

- listIdentifiersStaticValuesHARDCODED: list[str] = ['dimensionsTotal', 'leavesTotal']
+ return ingredientsModule, ingredientsCount

  def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFunction: IngredientsFunction, job: RecipeJobTheorem2, spices: SpicesJobNumba) -> tuple[IngredientsModule, IngredientsFunction]:
- """Add progress tracking capabilities to a Numba-optimized function.
-
- (AI generated docstring)
-
- This function modifies both the module and the function to integrate Numba-compatible
- progress tracking for long-running calculations. It performs several key transformations:
-
- 1. Adds a progress bar parameter to the function signature
- 2. Replaces counting increments with progress bar updates
- 3. Creates a launcher section that displays and updates progress
- 4. Configures file output to save results upon completion
-
- The progress tracking is particularly important for map folding calculations
- which can take hours or days to complete, providing visual feedback and
- estimated completion times.
+ """Add a tqdm progress bar to a Numba-optimized function.

  Parameters
  ----------
@@ -81,11 +73,12 @@ def addLauncherNumbaProgress(ingredientsModule: IngredientsModule, ingredientsFu
  -------
  moduleAndFunction : tuple[IngredientsModule, IngredientsFunction]
  Modified module and function with integrated progress tracking capabilities.
-
  """
+ # TODO When using the progress bar, `count` does not return `groupsOfFolds`, so `count` does not `*= 2`. So, I have to manually
+ # change the update value. This should be dynamic.
  linesLaunch: str = f"""
  if __name__ == '__main__':
- with ProgressBar(total={job.foldsTotalEstimated}, update_interval=2) as statusUpdate:
+ with ProgressBar(total={job.foldsTotalEstimated//job.state.leavesTotal}, update_interval=2) as statusUpdate:
  {job.countCallable}(statusUpdate)
  foldsTotal = statusUpdate.n * {job.state.leavesTotal}
  print('\\nmap {job.state.mapShape} =', foldsTotal)
@@ -98,17 +91,16 @@ if __name__ == '__main__':
  ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressPythonClass)
  ingredientsModule.imports.addImportFrom_asStr('numba_progress', numba_progressNumbaType)

- ast_argNumbaProgress = ast.arg(arg=spices.numbaProgressBarIdentifier, annotation=ast.Name(id=numba_progressPythonClass, ctx=ast.Load()))
+ ast_argNumbaProgress = Make.arg(spices.numbaProgressBarIdentifier, annotation=Make.Name(numba_progressPythonClass))
  ingredientsFunction.astFunctionDef.args.args.append(ast_argNumbaProgress)

- findThis: Callable[[ast.AST], TypeIs[ast.AugAssign] | bool] = Be.AugAssign.targetIs(IfThis.isNameIdentifier(job.shatteredDataclass.countingVariableName.id))
- doThat: Callable[[ast.AugAssign], ast.Expr] = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(1)])))
- countWithProgressBar: NodeChanger[ast.AugAssign, ast.Expr] = NodeChanger(findThis, doThat)
- countWithProgressBar.visit(ingredientsFunction.astFunctionDef)
+ NodeChanger(
+ findThis = Be.AugAssign.targetIs(IfThis.isNameIdentifier(job.shatteredDataclass.countingVariableName.id))
+ , doThat = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(2)])))
+ ).visit(ingredientsFunction.astFunctionDef)

- removeReturnStatement = NodeChanger(Be.Return, Then.removeIt)
- removeReturnStatement.visit(ingredientsFunction.astFunctionDef)
- ingredientsFunction.astFunctionDef.returns = Make.Constant(value=None)
+ NodeChanger(Be.Return, Then.removeIt).visit(ingredientsFunction.astFunctionDef)
+ ingredientsFunction.astFunctionDef.returns = Make.Constant(None)

  ingredientsModule.appendLauncher(ast.parse(linesLaunch))
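
The hunk above swaps the counting increment for a progress-bar update inside the jitted function. An equivalent, stdlib-only illustration of that AST rewrite follows; the real code uses astToolkit's `NodeChanger`, `Be`, `Then`, and `Make`, and while `groupsOfFolds`, `statusUpdate`, and the update value 2 come from the diff, the rest is illustrative:

    import ast

    sourceBefore = "groupsOfFolds += 1"

    class SwapIncrementForProgressUpdate(ast.NodeTransformer):
        def visit_AugAssign(self, node: ast.AugAssign) -> ast.AST:
            # Match `groupsOfFolds += ...` and replace the statement with `statusUpdate.update(2)`.
            if isinstance(node.target, ast.Name) and node.target.id == 'groupsOfFolds':
                return ast.Expr(ast.Call(
                    func=ast.Attribute(value=ast.Name('statusUpdate', ast.Load()), attr='update', ctx=ast.Load()),
                    args=[ast.Constant(2)], keywords=[]))
            return node

    tree = ast.fix_missing_locations(SwapIncrementForProgressUpdate().visit(ast.parse(sourceBefore)))
    print(ast.unparse(tree))  # statusUpdate.update(2)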

@@ -143,7 +135,6 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
  -------
  modifiedFunction : IngredientsFunction
  The modified function with parameters converted to initialized variables.
-
  """
  ingredientsFunction.imports.update(job.shatteredDataclass.imports)

@@ -178,9 +169,7 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre

  ingredientsFunction.astFunctionDef.body.insert(0, ImaAnnAssign)

- findThis: Callable[[ast.AST], TypeIs[ast.arg] | bool] = IfThis.is_argIdentifier(ast_arg.arg)
- remove_arg: NodeChanger[ast.arg, None] = NodeChanger(findThis, Then.removeIt)
- remove_arg.visit(ingredientsFunction.astFunctionDef)
+ NodeChanger(IfThis.is_argIdentifier(ast_arg.arg), Then.removeIt).visit(ingredientsFunction.astFunctionDef)

  ast.fix_missing_locations(ingredientsFunction.astFunctionDef)
  return ingredientsFunction
@@ -215,90 +204,28 @@ def makeJobNumba(job: RecipeJobTheorem2, spices: SpicesJobNumba) -> None:
  Optimization settings including Numba parameters and progress options.

  """
- astFunctionDef: ast.FunctionDef = raiseIfNone(extractFunctionDef(job.source_astModule, job.countCallable))
- ingredientsCount: IngredientsFunction = IngredientsFunction(astFunctionDef, LedgerOfImports())
+ ingredientsCount: IngredientsFunction = IngredientsFunction(raiseIfNone(extractFunctionDef(job.source_astModule, job.countCallable)))

- listIdentifiersStaticValues: list[str] = listIdentifiersStaticValuesHARDCODED
- for identifier in listIdentifiersStaticValues:
- findThis: Callable[[ast.AST], TypeIs[ast.Name] | bool] = IfThis.isNameIdentifier(identifier)
- doThat: Callable[[ast.Name], ast.Constant] = Then.replaceWith(Make.Constant(int(eval(f"job.state.{identifier}")))) # noqa: S307
- NodeChanger(findThis, doThat).visit(ingredientsCount.astFunctionDef)
+ for identifier in job.shatteredDataclass.listIdentifiersStaticScalars:
+ NodeChanger(IfThis.isNameIdentifier(identifier)
+ , Then.replaceWith(Make.Constant(int(eval(f"job.state.{identifier}")))) # noqa: S307
+ ).visit(ingredientsCount.astFunctionDef)

  ingredientsModule = IngredientsModule()
- # This launcher eliminates the use of one identifier, so run it now and you can dynamically determine which variables are not used
+ # TODO Refactor the subtly complicated interactions of these launchers with `move_arg2FunctionDefDOTbodyAndAssignInitialValues`
+ # Consider `astToolkit.transformationTools.removeUnusedParameters`.
+ # Generalize some parts of the launchers, especially writing to disk. Writing to disk is NOT robust enough. It doesn't even try to make a directory.
  if spices.useNumbaProgressBar:
  ingredientsModule, ingredientsCount = addLauncherNumbaProgress(ingredientsModule, ingredientsCount, job, spices)
  spices.parametersNumba['nogil'] = True
  else:
- linesLaunch: str = f"""
- if __name__ == '__main__':
- import time
- timeStart = time.perf_counter()
- foldsTotal = int({job.countCallable}() * {job.state.leavesTotal})
- print(time.perf_counter() - timeStart)
- print('\\nmap {job.state.mapShape} =', foldsTotal)
- writeStream = open('{job.pathFilenameFoldsTotal.as_posix()}', 'w')
- writeStream.write(str(foldsTotal))
- writeStream.close()
- """
- # from mapFolding.oeis import getFoldsTotalKnown # noqa: ERA001
- # print(foldsTotal == getFoldsTotalKnown({job.state.mapShape})) # noqa: ERA001
- ingredientsModule.appendLauncher(ast.parse(linesLaunch))
- changeReturnParallelCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(job.shatteredDataclass.countingVariableName)))
- changeReturnParallelCallable.visit(ingredientsCount.astFunctionDef)
- ingredientsCount.astFunctionDef.returns = job.shatteredDataclass.countingVariableAnnotation
+ ingredientsModule, ingredientsCount = addLauncher(ingredientsModule, ingredientsCount, job)

  ingredientsCount = move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsCount, job)
- class DatatypeConfig(NamedTuple):
- """Configuration for mapping framework datatypes to Numba-compatible types.
-
- This configuration class defines how abstract datatypes used in the map folding
- framework should be replaced with concrete Numba-compatible types during code
- generation. Each configuration specifies the source module, target type name,
- and optional import alias for the transformation.
-
- Attributes
- ----------
- fml : str
- Framework datatype identifier to be replaced.
- Z0Z_module : identifierDotAttribute
- Module containing the target datatype (e.g., 'numba', 'numpy').
- Z0Z_type_name : str
- Concrete type name in the target module.
- Z0Z_asname : str | None = None
- Optional import alias for the type.
- """
-
- fml: str
- Z0Z_module: identifierDotAttribute
- Z0Z_type_name: str
- Z0Z_asname: str | None = None
-
- listDatatypeConfigs: list[DatatypeConfig] = [
- DatatypeConfig(fml='DatatypeLeavesTotal', Z0Z_module='numba', Z0Z_type_name='uint8'),
- DatatypeConfig(fml='DatatypeElephino', Z0Z_module='numba', Z0Z_type_name='uint16'),
- DatatypeConfig(fml='DatatypeFoldsTotal', Z0Z_module='numba', Z0Z_type_name='uint64'),
- ]
-
- for datatypeConfig in listDatatypeConfigs:
- ingredientsModule.imports.addImportFrom_asStr(datatypeConfig.Z0Z_module, datatypeConfig.Z0Z_type_name)
- statement = Make.Assign(
- [Make.Name(datatypeConfig.fml, ast.Store())],
- Make.Name(datatypeConfig.Z0Z_type_name)
- )
- ingredientsModule.appendPrologue(statement=statement)
-
- ingredientsCount.imports.removeImportFromModule('mapFolding.dataBaskets')

- listNumPyTypeConfigs = [
- DatatypeConfig(fml='Array1DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array1DLeavesTotal'),
- DatatypeConfig(fml='Array1DElephino', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array1DElephino'),
- DatatypeConfig(fml='Array3DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array3DLeavesTotal'),
- ]
+ ingredientsCount, ingredientsModule = customizeDatatypeViaImport(ingredientsCount, ingredientsModule, listDatatypeConfigurations)

- for typeConfig in listNumPyTypeConfigs:
- ingredientsCount.imports.removeImportFrom(typeConfig.Z0Z_module, None, typeConfig.fml)
- ingredientsCount.imports.addImportFrom_asStr(typeConfig.Z0Z_module, typeConfig.Z0Z_type_name, typeConfig.Z0Z_asname)
+ ingredientsCount.imports.removeImportFromModule('mapFolding.dataBaskets')

  ingredientsCount.astFunctionDef.decorator_list = [] # TODO low-priority, handle this more elegantly
  ingredientsCount = decorateCallableWithNumba(ingredientsCount, spices.parametersNumba)
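
In the rewritten `makeJobNumba`, static-value embedding now reads the scalar identifiers from `job.shatteredDataclass.listIdentifiersStaticScalars` instead of a hardcoded list. A stdlib-only sketch of the same idea (the identifiers and the values 25 and 2 are illustrative, not taken from a real job; the real code uses astToolkit's `NodeChanger`):

    import ast

    source = "groupsOfFolds += leavesTotal - dimensionsTotal"
    staticScalars = {'leavesTotal': 25, 'dimensionsTotal': 2}  # illustrative values

    class EmbedStaticScalars(ast.NodeTransformer):
        def visit_Name(self, node: ast.Name) -> ast.AST:
            # Replace every read of an unchanging scalar with its literal value.
            if isinstance(node.ctx, ast.Load) and node.id in staticScalars:
                return ast.Constant(staticScalars[node.id])
            return node

    tree = ast.fix_missing_locations(EmbedStaticScalars().visit(ast.parse(source)))
    print(ast.unparse(tree))  # groupsOfFolds += 25 - 2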
@@ -312,26 +239,20 @@ if __name__ == '__main__':
  - `makeJobNumba` increases optimization especially by limiting its capabilities to just one set of parameters
  - the synthesized module must run well as a standalone interpreted-Python script
  - the next major optimization step will (probably) be to use the module synthesized by `makeJobNumba` to compile a standalone executable
- - Nevertheless, at each major optimization step, the code is constantly being improved and optimized, so everything must be well organized (read: semantic) and able to handle a range of arbitrary upstream and not disrupt downstream transformations
-
- Necessary
- - Move the function's parameters to the function body,
- - initialize identifiers with their state types and values,
-
- Optimizations
- - replace static-valued identifiers with their values
- - narrowly focused imports
-
- Minutia
- - do not use `with` statement inside numba jitted code, except to use numba's obj mode
+ - Nevertheless, at each major optimization step, the code is constantly being improved and optimized, so everything must be
+ well organized (read: semantic) and able to handle a range of arbitrary upstream and not disrupt downstream transformations
  """

- if __name__ == '__main__':
- state = transitionOnGroupsOfFolds(MapFoldingState((2,5)))
+ def fromMapShape(mapShape: tuple[DatatypeLeavesTotal, ...]) -> None:
+ """Generate and write an optimized Numba-compiled map folding module for a specific map shape."""
+ state: MapFoldingState = transitionOnGroupsOfFolds(MapFoldingState(mapShape))
+ foldsTotalEstimated: int = getFoldsTotalKnown(state.mapShape) or dictionaryEstimatesMapFolding.get(state.mapShape, 0)
  pathModule = PurePosixPath(packageSettings.pathPackage, 'jobs')
  pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(state.mapShape, pathModule))
- aJob = RecipeJobTheorem2(state, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal)
+ aJob = RecipeJobTheorem2(state, pathModule=pathModule, pathFilenameFoldsTotal=pathFilenameFoldsTotal, foldsTotalEstimated=foldsTotalEstimated)
  spices = SpicesJobNumba(useNumbaProgressBar=True, parametersNumba=parametersNumbaLight)
  makeJobNumba(aJob, spices)

- # TODO Improve this module with lessons learned in `makeJobTheorem2codon`.
+ if __name__ == '__main__':
+ mapShape: tuple[DatatypeLeavesTotal, ...] = (5,5)
+ fromMapShape(mapShape)
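
The module-level `__main__` block now delegates to the new `fromMapShape` entry point. A hypothetical invocation for a different shape (the import path comes from the file list above, and `(2, 21)` is one of the shapes in `dictionaryEstimatesMapFolding`); per the diff, the generated job module and its `foldsTotal` output land under the package's `jobs/` directory:

    from mapFolding.someAssemblyRequired.makeJobTheorem2Numba import fromMapShape

    # Synthesize a standalone Numba job module for a 2x21 map.
    fromMapShape((2, 21))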