mapFolding 0.9.2__py3-none-any.whl → 0.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. mapFolding/Z0Z_flowControl.py +117 -0
  2. mapFolding/__init__.py +28 -26
  3. mapFolding/basecamp.py +1 -1
  4. mapFolding/beDRY.py +1 -2
  5. mapFolding/daoOfMapFolding.py +142 -0
  6. mapFolding/dataBaskets.py +49 -0
  7. mapFolding/datatypes.py +21 -0
  8. mapFolding/oeis.py +1 -2
  9. mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py +226 -0
  10. mapFolding/someAssemblyRequired/__init__.py +12 -2
  11. mapFolding/someAssemblyRequired/_theTypes.py +11 -5
  12. mapFolding/someAssemblyRequired/_tool_Make.py +8 -0
  13. mapFolding/someAssemblyRequired/_tool_Then.py +44 -1
  14. mapFolding/someAssemblyRequired/_toolboxAST.py +57 -0
  15. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +95 -29
  16. mapFolding/someAssemblyRequired/_toolboxContainers.py +59 -53
  17. mapFolding/someAssemblyRequired/_toolboxPython.py +52 -50
  18. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +10 -9
  19. mapFolding/someAssemblyRequired/toolboxNumba.py +1 -1
  20. mapFolding/someAssemblyRequired/transformationTools.py +40 -58
  21. mapFolding/syntheticModules/dataPacking.py +25 -0
  22. mapFolding/syntheticModules/initializeCount.py +49 -0
  23. mapFolding/syntheticModules/theorem2.py +49 -0
  24. mapFolding/syntheticModules/theorem2Numba.py +51 -0
  25. mapFolding/theSSOT.py +13 -21
  26. {mapfolding-0.9.2.dist-info → mapfolding-0.9.4.dist-info}/METADATA +4 -3
  27. mapfolding-0.9.4.dist-info/RECORD +57 -0
  28. {mapfolding-0.9.2.dist-info → mapfolding-0.9.4.dist-info}/WHEEL +1 -1
  29. tests/__init__.py +2 -2
  30. tests/conftest.py +7 -7
  31. tests/test_computations.py +17 -13
  32. tests/test_tasks.py +2 -2
  33. mapfolding-0.9.2.dist-info/RECORD +0 -47
  34. {mapfolding-0.9.2.dist-info → mapfolding-0.9.4.dist-info}/entry_points.txt +0 -0
  35. {mapfolding-0.9.2.dist-info → mapfolding-0.9.4.dist-info}/licenses/LICENSE +0 -0
  36. {mapfolding-0.9.2.dist-info → mapfolding-0.9.4.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,21 @@
  """
  AST Container Classes for Python Code Generation and Transformation

- This module provides specialized container classes that organize AST nodes, imports,
- and program structure for code generation and transformation. These classes form
- the organizational backbone of the code generation system, enabling:
-
- 1. Tracking and managing imports with LedgerOfImports
- 2. Packaging function definitions with their dependencies via IngredientsFunction
- 3. Structuring complete modules with IngredientsModule
- 4. Configuring code synthesis with RecipeSynthesizeFlow
- 5. Organizing decomposed dataclass representations with ShatteredDataclass
-
- Together, these container classes implement a component-based architecture for
- programmatic generation of high-performance code. They maintain a clean separation
- between structure and content, allowing transformations to be applied systematically
- while preserving relationships between code elements.
-
- The containers work in conjunction with transformation tools that manipulate the
- contained AST nodes to implement specific optimizations and transformations.
+ This module provides specialized container classes that organize AST nodes, imports, and program structure for code
+ generation and transformation. These classes form the organizational backbone of the code generation system, enabling:
+
+ 1. Tracking and managing imports with LedgerOfImports.
+ 2. Packaging function definitions with their dependencies via IngredientsFunction.
+ 3. Structuring complete modules with IngredientsModule.
+ 4. Configuring code synthesis with RecipeSynthesizeFlow.
+ 5. Organizing decomposed dataclass representations with ShatteredDataclass.
+
+ Together, these container classes implement a component-based architecture for programmatic generation of
+ high-performance code. They maintain a clean separation between structure and content, allowing transformations to be
+ applied systematically while preserving relationships between code elements.
+
+ The containers work in conjunction with transformation tools that manipulate the contained AST nodes to implement
+ specific optimizations and transformations.
  """

  from collections import defaultdict
@@ -33,30 +31,28 @@ class LedgerOfImports:
  """
  Track and manage import statements for programmatically generated code.

- LedgerOfImports acts as a registry for import statements, maintaining a clean
- separation between the logical structure of imports and their textual representation.
- It enables:
+ LedgerOfImports acts as a registry for import statements, maintaining a clean separation between the logical
+ structure of imports and their textual representation. It enables:

- 1. Tracking regular imports and import-from statements
- 2. Adding imports programmatically during code transformation
- 3. Merging imports from multiple sources
- 4. Removing unnecessary or conflicting imports
- 5. Generating optimized AST import nodes for the final code
+ 1. Tracking regular imports and import-from statements.
+ 2. Adding imports programmatically during code transformation.
+ 3. Merging imports from multiple sources.
+ 4. Removing unnecessary or conflicting imports.
+ 5. Generating optimized AST import nodes for the final code.

- This class forms the foundation of dependency management in generated code,
- ensuring that all required libraries are available without duplication or
- conflict.
+ This class forms the foundation of dependency management in generated code, ensuring that all required libraries are
+ available without duplication or conflict.
  """
  # TODO When resolving the ledger of imports, remove self-referential imports
- # TODO add TypeIgnore tracking to the ledger of imports

- def __init__(self, startWith: ast.AST | None = None) -> None:
+ def __init__(self, startWith: ast.AST | None = None, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
  self.dictionaryImportFrom: dict[str_nameDOTname, list[tuple[ast_Identifier, ast_Identifier | None]]] = defaultdict(list)
  self.listImport: list[str_nameDOTname] = []
+ self.type_ignores = [] if type_ignores is None else list(type_ignores)
  if startWith:
  self.walkThis(startWith)

- def addAst(self, astImport____: ast.Import | ast.ImportFrom) -> None:
+ def addAst(self, astImport____: ast.Import | ast.ImportFrom, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
  match astImport____:
  case ast.Import():
  for alias in astImport____.names:
@@ -69,14 +65,18 @@ class LedgerOfImports:
  self.dictionaryImportFrom[astImport____.module].append((alias.name, alias.asname))
  case _:
  raise ValueError(f"I received {type(astImport____) = }, but I can only accept {ast.Import} and {ast.ImportFrom}.")
+ if type_ignores:
+ self.type_ignores.extend(type_ignores)

- def addImport_asStr(self, moduleWithLogicalPath: str_nameDOTname) -> None:
+ def addImport_asStr(self, moduleWithLogicalPath: str_nameDOTname, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
  self.listImport.append(moduleWithLogicalPath)
+ if type_ignores:
+ self.type_ignores.extend(type_ignores)

- def addImportFrom_asStr(self, moduleWithLogicalPath: str_nameDOTname, name: ast_Identifier, asname: ast_Identifier | None = None) -> None:
- if moduleWithLogicalPath not in self.dictionaryImportFrom:
- self.dictionaryImportFrom[moduleWithLogicalPath] = []
+ def addImportFrom_asStr(self, moduleWithLogicalPath: str_nameDOTname, name: ast_Identifier, asname: ast_Identifier | None = None, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
  self.dictionaryImportFrom[moduleWithLogicalPath].append((name, asname))
+ if type_ignores:
+ self.type_ignores.extend(type_ignores)

  def removeImportFromModule(self, moduleWithLogicalPath: str_nameDOTname) -> None:
  """Remove all imports from a specific module."""
@@ -126,14 +126,18 @@ class LedgerOfImports:
  Parameters:
  *fromLedger: One or more other `LedgerOfImports` objects from which to merge.
  """
- self.dictionaryImportFrom = updateExtendPolishDictionaryLists(self.dictionaryImportFrom, *(ledger.dictionaryImportFrom for ledger in fromLedger), destroyDuplicates=True, reorderLists=True)
+ updatedDictionary = updateExtendPolishDictionaryLists(self.dictionaryImportFrom, *(ledger.dictionaryImportFrom for ledger in fromLedger), destroyDuplicates=True, reorderLists=True)
+ self.dictionaryImportFrom = defaultdict(list, updatedDictionary)
  for ledger in fromLedger:
  self.listImport.extend(ledger.listImport)
+ self.type_ignores.extend(ledger.type_ignores)

- def walkThis(self, walkThis: ast.AST) -> None:
+ def walkThis(self, walkThis: ast.AST, type_ignores: list[ast.TypeIgnore] | None = None) -> None:
  for nodeBuffalo in ast.walk(walkThis):
  if isinstance(nodeBuffalo, (ast.Import, ast.ImportFrom)):
  self.addAst(nodeBuffalo)
+ if type_ignores:
+ self.type_ignores.extend(type_ignores)

  # Consolidate settings classes through inheritance https://github.com/hunterhogan/mapFolding/issues/15
  @dataclasses.dataclass
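For orientation, here is a minimal sketch of how the revised LedgerOfImports API might be exercised, based only on the method signatures visible in the hunks above. The import path and the makeList_ast call (referenced later by IngredientsModule) are assumptions drawn from this diff, not a documented usage example.

import ast
from mapFolding.someAssemblyRequired import LedgerOfImports  # assumed import path

ledger = LedgerOfImports()
ledger.addImport_asStr('numpy')
ledger.addImportFrom_asStr('numba', 'jit')
# New in 0.9.4: type ignores can ride along with the import they belong to.
ledger.addImport_asStr('pickle', type_ignores=[ast.TypeIgnore(lineno=1, tag='')])
ledger.walkThis(ast.parse('from collections import defaultdict\nimport dataclasses'))

for node in ledger.makeList_ast():  # assumed to return ast.Import / ast.ImportFrom nodes
    print(ast.unparse(node))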
@@ -141,17 +145,16 @@ class IngredientsFunction:
  """
  Package a function definition with its import dependencies for code generation.

- IngredientsFunction encapsulates an AST function definition along with all the
- imports required for that function to operate correctly. This creates a modular,
- portable unit that can be:
+ IngredientsFunction encapsulates an AST function definition along with all the imports required for that function to
+ operate correctly. This creates a modular, portable unit that can be:

- 1. Transformed independently (e.g., by applying Numba decorators)
- 2. Transplanted between modules while maintaining dependencies
- 3. Combined with other functions to form complete modules
- 4. Analyzed for optimization opportunities
+ 1. Transformed independently (e.g., by applying Numba decorators).
+ 2. Transplanted between modules while maintaining dependencies.
+ 3. Combined with other functions to form complete modules.
+ 4. Analyzed for optimization opportunities.

- This class forms the primary unit of function manipulation in the code generation
- system, enabling targeted transformations while preserving function dependencies.
+ This class forms the primary unit of function manipulation in the code generation system, enabling targeted
+ transformations while preserving function dependencies.

  Parameters:
  astFunctionDef: The AST representation of the function definition
@@ -266,15 +269,18 @@ class IngredientsModule:
  for ingredientsFunction in self.listIngredientsFunctions:
  ingredientsFunction.imports.removeImportFrom(moduleWithLogicalPath, name, asname)

- @property
- def list_astImportImportFrom(self) -> list[ast.Import | ast.ImportFrom]:
- """List of `ast.Import` and `ast.ImportFrom` statements."""
+ def _consolidatedLedger(self) -> LedgerOfImports:
+ """Consolidate all ledgers of imports."""
  sherpaLedger = LedgerOfImports()
  listLedgers: list[LedgerOfImports] = [self.imports]
  for ingredientsFunction in self.listIngredientsFunctions:
  listLedgers.append(ingredientsFunction.imports)
  sherpaLedger.update(*listLedgers)
- return sherpaLedger.makeList_ast()
+ return sherpaLedger
+
+ @property
+ def list_astImportImportFrom(self) -> list[ast.Import | ast.ImportFrom]:
+ return self._consolidatedLedger().makeList_ast()

  @property
  def body(self) -> list[ast.stmt]:
@@ -291,7 +297,7 @@ class IngredientsModule:
  @property
  def type_ignores(self) -> list[ast.TypeIgnore]:
  listTypeIgnore: list[ast.TypeIgnore] = self.supplemental_type_ignores
- # listTypeIgnore.extend(self.imports.makeListAst())
+ listTypeIgnore.extend(self._consolidatedLedger().type_ignores)
  listTypeIgnore.extend(self.prologue.type_ignores)
  for ingredientsFunction in self.listIngredientsFunctions:
  listTypeIgnore.extend(ingredientsFunction.type_ignores)
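To see where the new _consolidatedLedger helper pays off, here is a hedged sketch of final module assembly using the three IngredientsModule properties shown above (body, list_astImportImportFrom, type_ignores). The stitching itself is plain ast usage and an assumption about how the pieces fit, not the package's own rendering code.

import ast

def renderModule(ingredientsModule) -> str:
    # Assumes `body` holds the non-import statements and the imports come from the consolidated ledger.
    astModule = ast.Module(
        body=[*ingredientsModule.list_astImportImportFrom, *ingredientsModule.body],
        type_ignores=ingredientsModule.type_ignores,  # now includes type ignores gathered from every ledger
    )
    ast.fix_missing_locations(astModule)
    return ast.unparse(astModule)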
@@ -316,7 +322,7 @@ class RecipeSynthesizeFlow:

  This configuration class serves as a single source of truth for the code generation
  process, ensuring consistency across all generated artifacts while enabling
- customization of the transformation pipeline.
+ customization of the transformation assembly line.

  The transformation process uses this configuration to extract functions from the
  source module, transform them according to optimization rules, and output
@@ -439,7 +445,7 @@ class ShatteredDataclass:
  fragments4AssignmentOrParameters: ast.Tuple = dummyTuple
  """AST tuple used as target for assignment to capture returned fragments."""

- ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
+ imports: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
  """Import records for the dataclass and its constituent parts."""

  list_argAnnotated4ArgumentsSpecification: list[ast.arg] = dataclasses.field(default_factory=list)
@@ -1,18 +1,17 @@
  """
  Core AST Traversal and Transformation Utilities for Python Code Manipulation

- This module provides the foundation for traversing and modifying Python Abstract
- Syntax Trees (ASTs). It contains two primary classes:
+ This module provides the foundation for traversing and modifying Python Abstract Syntax Trees (ASTs). It contains two
+ primary classes:

- 1. NodeTourist: Implements the visitor pattern to traverse an AST and extract information
- from nodes that match specific predicates without modifying the AST.
+ 1. NodeTourist: Implements the visitor pattern to traverse an AST and extract information from nodes that match specific
+ predicates without modifying the AST.

- 2. NodeChanger: Extends ast.NodeTransformer to selectively transform AST nodes that
- match specific predicates, enabling targeted code modifications.
+ 2. NodeChanger: Extends ast.NodeTransformer to selectively transform AST nodes that match specific predicates, enabling
+ targeted code modifications.

- The module also provides utilities for importing modules, loading callables from files,
- and parsing Python code into AST structures, creating a complete workflow for code
- analysis and transformation.
+ The module also provides utilities for importing modules, loading callables from files, and parsing Python code into AST
+ structures, creating a complete workflow for code analysis and transformation.
  """

  from collections.abc import Callable
@@ -32,13 +31,12 @@ class NodeTourist(ast.NodeVisitor):
  """
  Visit and extract information from AST nodes that match a predicate.

- NodeTourist implements the visitor pattern to traverse an AST, applying
- a predicate function to each node and capturing nodes or their attributes
- when they match. Unlike NodeChanger, it doesn't modify the AST but collects
+ NodeTourist implements the visitor pattern to traverse an AST, applying a predicate function to each node and
+ capturing nodes or their attributes when they match. Unlike NodeChanger, it doesn't modify the AST but collects
  information during traversal.

- This class is particularly useful for analyzing AST structures, extracting
- specific nodes or node properties, and gathering information about code patterns.
+ This class is particularly useful for analyzing AST structures, extracting specific nodes or node properties, and
+ gathering information about code patterns.
  """
  def __init__(self, findThis: Callable[..., Any], doThat: Callable[..., Any]) -> None:
  self.findThis = findThis
@@ -61,12 +59,12 @@ class NodeChanger(ast.NodeTransformer):
  """
  Transform AST nodes that match a predicate by applying a transformation function.

- NodeChanger is an AST node transformer that applies a targeted transformation
- to nodes matching a specific predicate. It traverses the AST and only modifies
- nodes that satisfy the predicate condition, leaving other nodes unchanged.
+ NodeChanger is an AST node transformer that applies a targeted transformation to nodes matching a specific
+ predicate. It traverses the AST and only modifies nodes that satisfy the predicate condition, leaving other nodes
+ unchanged.

- This class extends ast.NodeTransformer and implements the visitor pattern
- to systematically process and transform an AST tree.
+ This class extends ast.NodeTransformer and implements the visitor pattern to systematically process and transform an
+ AST tree.
  """
  def __init__(self, findThis: Callable[..., Any], doThat: Callable[..., Any]) -> None:
  self.findThis = findThis
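The predicate-driven visit/transform split described in these docstrings can be reproduced with the standard library alone. The sketch below uses a plain ast.NodeTransformer rather than NodeChanger itself, because NodeChanger's visit methods are not shown in this diff; it illustrates the pattern, not the package's code.

import ast

class RenameCalls(ast.NodeTransformer):
    # Apply a targeted change only to nodes that satisfy the predicate: calls to one specific name.
    def __init__(self, findThis: str, doThat: str) -> None:
        self.findThis = findThis
        self.doThat = doThat

    def visit_Call(self, node: ast.Call) -> ast.Call:
        self.generic_visit(node)
        if isinstance(node.func, ast.Name) and node.func.id == self.findThis:
            node.func = ast.copy_location(ast.Name(id=self.doThat, ctx=ast.Load()), node.func)
        return node

tree = ast.parse('total = countSlowly(x) + countSlowly(y)')
tree = RenameCalls('countSlowly', 'countQuickly').visit(tree)
ast.fix_missing_locations(tree)
print(ast.unparse(tree))  # total = countQuickly(x) + countQuickly(y)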
@@ -81,18 +79,18 @@ def importLogicalPath2Callable(logicalPathModule: str_nameDOTname, identifier: a
  """
  Import a callable object (function or class) from a module based on its logical path.

- This function imports a module using `importlib.import_module()` and then retrieves
- a specific attribute (function, class, or other object) from that module.
+ This function imports a module using `importlib.import_module()` and then retrieves a specific attribute (function,
+ class, or other object) from that module.

  Parameters
  ----------
- logicalPathModule : str
+ logicalPathModule
  The logical path to the module, using dot notation (e.g., 'package.subpackage.module').
- identifier : str
+ identifier
  The name of the callable object to retrieve from the module.
- packageIdentifierIfRelative : str, optional
- The package name to use as the anchor point if `logicalPathModule` is a relative import.
- If None, absolute import is assumed.
+ packageIdentifierIfRelative : None
+ The package name to use as the anchor point if `logicalPathModule` is a relative import. If None, absolute
+ import is assumed.

  Returns
  -------
@@ -105,16 +103,17 @@ def importLogicalPath2Callable(logicalPathModule: str_nameDOTname, identifier: a
  def importPathFilename2Callable(pathFilename: PathLike[Any] | PurePath, identifier: ast_Identifier, moduleIdentifier: ast_Identifier | None = None) -> Callable[..., Any]:
  """
  Load a callable (function, class, etc.) from a Python file.
- This function imports a specified Python file as a module, extracts a callable object
- from it by name, and returns that callable.
+
+ This function imports a specified Python file as a module, extracts a callable object from it by name, and returns
+ that callable.

  Parameters
  ----------
- pathFilename : Union[PathLike[Any], PurePath]
+ pathFilename
  Path to the Python file to import.
- identifier : str
+ identifier
  Name of the callable to extract from the imported module.
- moduleIdentifier : Optional[str]
+ moduleIdentifier
  Name to use for the imported module. If None, the filename stem is used.

  Returns
@@ -138,49 +137,52 @@ def importPathFilename2Callable(pathFilename: PathLike[Any] | PurePath, identifi
  importlibSpecification.loader.exec_module(moduleImported_jk_hahaha)
  return getattr(moduleImported_jk_hahaha, identifier)

- def parseLogicalPath2astModule(logicalPathModule: str_nameDOTname, packageIdentifierIfRelative: ast_Identifier|None=None, mode: Literal['exec'] = 'exec') -> ast.Module:
+ def parseLogicalPath2astModule(logicalPathModule: str_nameDOTname, packageIdentifierIfRelative: ast_Identifier | None = None, mode: Literal['exec'] = 'exec') -> ast.Module:
  """
- Parse a logical Python module path into an AST Module.
+ Parse a logical Python module path into an `ast.Module`.

- This function imports a module using its logical path (e.g., 'package.subpackage.module')
- and converts its source code into an Abstract Syntax Tree (AST) Module object.
+ This function imports a module using its logical path (e.g., 'package.subpackage.module') and converts its source
+ code into an Abstract Syntax Tree (AST) Module object.

  Parameters
  ----------
- logicalPathModule : str
+ logicalPathModule
  The logical path to the module using dot notation (e.g., 'package.module').
- packageIdentifierIfRelative : ast.Identifier or None, optional
+ packageIdentifierIfRelative : None
  The package identifier to use if the module path is relative, defaults to None.
- mode : Literal['exec'], optional
- The parsing mode to use, defaults to 'exec'.
+ mode : Literal['exec']
+ The mode parameter for `ast.parse`. Default is `Literal['exec']`. Options are `Literal['exec']`, `"exec"` (which
+ is _not_ the same as `Literal['exec']`), `Literal['eval']`, `Literal['func_type']`, `Literal['single']`. See
+ `ast.parse` documentation for some details and much confusion.

  Returns
  -------
- ast.Module
+ astModule
  An AST Module object representing the parsed source code of the imported module.
  """
  moduleImported: ModuleType = importlib.import_module(logicalPathModule, packageIdentifierIfRelative)
  sourcePython: str = inspect_getsource(moduleImported)
- return ast.parse(sourcePython, mode=mode)
+ return ast.parse(sourcePython, mode)

  def parsePathFilename2astModule(pathFilename: PathLike[Any] | PurePath, mode: Literal['exec'] = 'exec') -> ast.Module:
  """
- Parse a file from a given path into an ast.Module.
+ Parse a file from a given path into an `ast.Module`.

- This function reads the content of a file specified by `pathFilename` and parses it into an
- Abstract Syntax Tree (AST) Module using Python's ast module.
+ This function reads the content of a file specified by `pathFilename` and parses it into an Abstract Syntax Tree
+ (AST) Module using Python's ast module.

  Parameters
  ----------
- pathFilename : PathLike[Any] | PurePath
+ pathFilename
  The path to the file to be parsed. Can be a string path, PathLike object, or PurePath object.
- mode : Literal['exec'], optional
- The mode parameter for ast.parse. Default is 'exec'.
- Options are 'exec', 'eval', or 'single'. See ast.parse documentation for details.
+ mode : Literal['exec']
+ The mode parameter for `ast.parse`. Default is `Literal['exec']`. Options are `Literal['exec']`, `"exec"` (which
+ is _not_ the same as `Literal['exec']`), `Literal['eval']`, `Literal['func_type']`, `Literal['single']`. See
+ `ast.parse` documentation for some details and much confusion.

  Returns
  -------
- ast.Module
+ astModule
  The parsed abstract syntax tree module.
  """
- return ast.parse(Path(pathFilename).read_text(), mode=mode)
+ return ast.parse(Path(pathFilename).read_text(), mode)
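The two parse helpers above wrap machinery that is entirely standard library. The following sketch reproduces the import-then-parse flow with importlib, inspect, and ast only, as a point of reference rather than the package's API.

import ast
import importlib
import inspect

def parseModuleSource(logicalPath: str) -> ast.Module:
    # Mirror the helper's flow: import the module, read its source, parse it into an ast.Module.
    moduleImported = importlib.import_module(logicalPath)
    sourcePython = inspect.getsource(moduleImported)
    return ast.parse(sourcePython, mode='exec')

astModule = parseModuleSource('collections.abc')
functionNames = [node.name for node in ast.walk(astModule) if isinstance(node, ast.FunctionDef)]
print(functionNames[:5])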
@@ -6,7 +6,7 @@ for specific map folding calculation jobs. Unlike the general-purpose transforma
  in toolboxNumba.py, this module creates standalone Python modules optimized for a
  single map shape with statically-encoded parameters.

- The code generation pipeline focuses on:
+ The code generation assembly line focuses on:

  1. Converting function parameters to initialized variables with concrete values.
  2. Replacing dynamic computations with statically-known values.
@@ -22,6 +22,7 @@ from mapFolding import getPathFilenameFoldsTotal, raiseIfNoneGitHubIssueNumber3,
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
  be,
+ extractFunctionDef,
  ifThis,
  IngredientsFunction,
  IngredientsModule,
@@ -34,7 +35,7 @@ from mapFolding.someAssemblyRequired import (
  )
  from mapFolding.someAssemblyRequired.RecipeJob import RecipeJob
  from mapFolding.someAssemblyRequired.toolboxNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
- from mapFolding.someAssemblyRequired.transformationTools import dictionaryEstimates, extractFunctionDef, write_astModule, makeInitializedComputationState
+ from mapFolding.someAssemblyRequired.transformationTools import dictionaryEstimates, write_astModule, makeInitializedComputationState
  from pathlib import PurePosixPath
  from typing import cast, NamedTuple
  from Z0Z_tools import autoDecodingRLE
@@ -131,7 +132,7 @@ def move_arg2FunctionDefDOTbodyAndAssignInitialValues(ingredientsFunction: Ingre
  Returns:
  The modified function with parameters converted to initialized variables.
  """
- ingredientsFunction.imports.update(job.shatteredDataclass.ledger)
+ ingredientsFunction.imports.update(job.shatteredDataclass.imports)

  list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
  list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
@@ -175,7 +176,7 @@ def makeJobNumba(job: RecipeJob, spices: SpicesJobNumba) -> None:
  """
  Generate a highly-optimized, single-purpose Numba module for a specific map shape.

- This function implements the complete transformation pipeline for creating a
+ This function implements the complete transformation assembly line for creating a
  standalone, specialized implementation for calculating map folding solutions for
  a specific shape. The process includes:

@@ -245,9 +246,9 @@ if __name__ == '__main__':
  Z0Z_asname: ast_Identifier | None = None

  listDatatypeConfigs = [
- DatatypeConfig(fml='DatatypeLeavesTotal', Z0Z_module='numba', Z0Z_type_name='uint16'),
+ DatatypeConfig(fml='DatatypeLeavesTotal', Z0Z_module='numba', Z0Z_type_name='uint8'),
  DatatypeConfig(fml='DatatypeElephino', Z0Z_module='numba', Z0Z_type_name='uint16'),
- DatatypeConfig(fml='DatatypeFoldsTotal', Z0Z_module='numba', Z0Z_type_name='int64'),
+ DatatypeConfig(fml='DatatypeFoldsTotal', Z0Z_module='numba', Z0Z_type_name='uint64'),
  ]

  for datatypeConfig in listDatatypeConfigs:
@@ -261,9 +262,9 @@ if __name__ == '__main__':
  ingredientsCount.imports.removeImportFromModule('mapFolding.theSSOT')

  listNumPyTypeConfigs = [
- DatatypeConfig(fml='Array1DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array1DLeavesTotal'),
+ DatatypeConfig(fml='Array1DLeavesTotal', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array1DLeavesTotal'),
  DatatypeConfig(fml='Array1DElephino', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array1DElephino'),
- DatatypeConfig(fml='Array3D', Z0Z_module='numpy', Z0Z_type_name='uint16', Z0Z_asname='Array3D'),
+ DatatypeConfig(fml='Array3D', Z0Z_module='numpy', Z0Z_type_name='uint8', Z0Z_asname='Array3D'),
  ]

  for typeConfig in listNumPyTypeConfigs:
@@ -299,7 +300,7 @@ if __name__ == '__main__':
  """

  if __name__ == '__main__':
- mapShape = (2,2,2,2,2,2,2,2)
+ mapShape = (2,21)
  state = makeInitializedComputationState(mapShape)
  # foldsTotalEstimated = getFoldsTotalKnown(state.mapShape) // state.leavesTotal
  foldsTotalEstimated = dictionaryEstimates[state.mapShape] // state.leavesTotal
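One plausible reading of the uint16 to uint8 switch in the two hunks above, offered as an assumption rather than a documented rationale: the new (2, 21) job has only 42 leaves, so every leaf value fits in one byte, whereas the previous (2,2,2,2,2,2,2,2) job has 256 leaves, which exceeds the uint8 maximum of 255.

import numpy

mapShape = (2, 21)
leavesTotal = int(numpy.prod(mapShape))             # 42 leaves for this job
assert leavesTotal <= numpy.iinfo(numpy.uint8).max  # 42 <= 255, so uint8 can hold every leaf value
print(leavesTotal)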
@@ -158,7 +158,7 @@ def makeNumbaFlow(numbaFlow: RecipeSynthesizeFlow) -> None:
  """
  Transform standard Python algorithm code into optimized Numba implementations.

- This function implements the complete transformation pipeline that converts
+ This function implements the complete transformation assembly line that converts
  a conventional Python implementation into a high-performance Numba-accelerated
  version. The process includes:

@@ -22,11 +22,12 @@ from autoflake import fix_code as autoflake_fix_code
  from collections.abc import Callable, Mapping
  from copy import deepcopy
  from mapFolding.beDRY import outfitCountFolds
- from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal, writeStringToHere
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
+ astModuleToIngredientsFunction,
  be,
  DOT,
+ extractClassDef,
  grab,
  ifThis,
  importLogicalPath2Callable,
@@ -44,6 +45,7 @@ from mapFolding.someAssemblyRequired import (
  个,
  )
  from mapFolding.theSSOT import ComputationState, raiseIfNoneGitHubIssueNumber3, The
+ from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal, writeStringToHere
  from os import PathLike
  from pathlib import Path, PurePath
  from typing import Any, Literal, overload
@@ -52,59 +54,6 @@ import dataclasses
  import pickle
  import python_minifier

- def astModuleToIngredientsFunction(astModule: ast.AST, identifierFunctionDef: ast_Identifier) -> IngredientsFunction:
- """
- Extract a function definition from an AST module and create an IngredientsFunction.
-
- This function finds a function definition with the specified identifier in the given
- AST module and wraps it in an IngredientsFunction object along with its import context.
-
- Parameters:
- astModule: The AST module containing the function definition.
- identifierFunctionDef: The name of the function to extract.
-
- Returns:
- An IngredientsFunction object containing the function definition and its imports.
-
- Raises:
- raiseIfNoneGitHubIssueNumber3: If the function definition is not found.
- """
- astFunctionDef = extractFunctionDef(astModule, identifierFunctionDef)
- if not astFunctionDef: raise raiseIfNoneGitHubIssueNumber3
- return IngredientsFunction(astFunctionDef, LedgerOfImports(astModule))
-
- def extractClassDef(module: ast.AST, identifier: ast_Identifier) -> ast.ClassDef | None:
- """
- Extract a class definition with a specific name from an AST module.
-
- This function searches through an AST module for a class definition that
- matches the provided identifier and returns it if found.
-
- Parameters:
- module: The AST module to search within.
- identifier: The name of the class to find.
-
- Returns:
- The matching class definition AST node, or None if not found.
- """
- return NodeTourist(ifThis.isClassDef_Identifier(identifier), Then.extractIt).captureLastMatch(module)
-
- def extractFunctionDef(module: ast.AST, identifier: ast_Identifier) -> ast.FunctionDef | None:
- """
- Extract a function definition with a specific name from an AST module.
-
- This function searches through an AST module for a function definition that
- matches the provided identifier and returns it if found.
-
- Parameters:
- module: The AST module to search within.
- identifier: The name of the function to find.
-
- Returns:
- astFunctionDef: The matching function definition AST node, or None if not found.
- """
- return NodeTourist(ifThis.isFunctionDef_Identifier(identifier), Then.extractIt).captureLastMatch(module)
-
  def makeDictionaryFunctionDef(module: ast.Module) -> dict[ast_Identifier, ast.FunctionDef]:
  """
  Create a dictionary mapping function names to their AST definitions.
@@ -404,8 +353,8 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
  shatteredDataclass.repack = Make.Assign(listTargets=[Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_astKeywords=shatteredDataclass.list_keyword_field__field4init))
  shatteredDataclass.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclass.listAnnotations))

- shatteredDataclass.ledger.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
- shatteredDataclass.ledger.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)
+ shatteredDataclass.imports.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
+ shatteredDataclass.imports.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)

  return shatteredDataclass

@@ -424,7 +373,7 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]
  4. Optimizes imports using autoflake
  5. Writes the final source code to the specified file location

- This is typically the final step in the code generation pipeline,
+ This is typically the final step in the code generation assembly line,
  producing optimized Python modules ready for execution.

  Parameters:
@@ -448,6 +397,27 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]

  # END of acceptable classes and functions ======================================================
  def removeUnusedParameters(ingredientsFunction: IngredientsFunction) -> IngredientsFunction:
+ """
+ Removes unused parameters from a function's AST definition, return statement, and annotation.
+
+ This function analyzes the Abstract Syntax Tree (AST) of a given function and removes
+ any parameters that are not referenced within the function body. It updates the
+ function signature, the return statement (if it's a tuple containing unused variables),
+ and the return type annotation accordingly.
+
+ Parameters
+ ----------
+ ingredientsFunction : IngredientsFunction
+ An object containing the AST representation of a function to be processed.
+
+ Returns
+ -------
+ IngredientsFunction
+ The modified IngredientsFunction object with unused parameters and corresponding
+ return elements/annotations removed from its AST.
+
+ The modification is done in-place on the original AST nodes within the IngredientsFunction object.
+ """
  list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
  list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
  listName: list[ast.Name] = []
@@ -459,6 +429,18 @@ def removeUnusedParameters(ingredientsFunction: IngredientsFunction) -> Ingredie
  for arg_Identifier in list_IdentifiersNotUsed:
  remove_arg = NodeChanger(ifThis.is_arg_Identifier(arg_Identifier), Then.removeIt)
  remove_arg.visit(ingredientsFunction.astFunctionDef)
+
+ list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
+
+ listName: list[ast.Name] = [Make.Name(ast_arg.arg) for ast_arg in list_argCuzMyBrainRefusesToThink]
+ replaceReturn = NodeChanger(be.Return, Then.replaceWith(Make.Return(Make.Tuple(listName))))
+ replaceReturn.visit(ingredientsFunction.astFunctionDef)
+
+ list_annotation: list[ast.expr] = [ast_arg.annotation for ast_arg in list_argCuzMyBrainRefusesToThink if ast_arg.annotation is not None]
+ ingredientsFunction.astFunctionDef.returns = Make.Subscript(Make.Name('tuple'), Make.Tuple(list_annotation))
+
+ ast.fix_missing_locations(ingredientsFunction.astFunctionDef)
+
  return ingredientsFunction

  def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
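The core idea behind removeUnusedParameters, stripped of the package's helper classes, can be shown with the standard ast module; this is an illustrative sketch of pruning unused parameters, not the package's implementation.

import ast

def pruneUnusedParameters(source: str) -> str:
    module = ast.parse(source)
    functionDef = module.body[0]
    assert isinstance(functionDef, ast.FunctionDef)
    namesUsed = {node.id for node in ast.walk(functionDef) if isinstance(node, ast.Name)}
    # Keep only the parameters the body actually references.
    functionDef.args.args = [arg for arg in functionDef.args.args if arg.arg in namesUsed]
    ast.fix_missing_locations(module)
    return ast.unparse(module)

print(pruneUnusedParameters('def f(x, unused, y):\n    return x + y'))
# def f(x, y):
#     return x + y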
@@ -514,7 +496,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
  instance_Identifier = recipeFlow.dataclassInstance
  getTheOtherRecord_damn = recipeFlow.dataclassInstanceTaskDistribution
  shatteredDataclass = shatter_dataclassesDOTdataclass(recipeFlow.logicalPathModuleDataclass, recipeFlow.sourceDataclassIdentifier, instance_Identifier)
- ingredientsDispatcher.imports.update(shatteredDataclass.ledger)
+ ingredientsDispatcher.imports.update(shatteredDataclass.imports)

  # How can I use dataclass settings as the SSOT for specific actions? https://github.com/hunterhogan/mapFolding/issues/16
  # Change callable parameters and Call to the callable at the same time ====
@@ -0,0 +1,25 @@
+ from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, MapFoldingState
+ from mapFolding.syntheticModules.theorem2Numba import count
+
+ def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
+ mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
+ groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
+ gap1ndex: DatatypeElephino = state.gap1ndex
+ gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
+ indexDimension: DatatypeLeavesTotal = state.indexDimension
+ indexLeaf: DatatypeLeavesTotal = state.indexLeaf
+ indexMiniGap: DatatypeElephino = state.indexMiniGap
+ leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
+ leafConnectee: DatatypeLeavesTotal = state.leafConnectee
+ dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
+ countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
+ gapRangeStart: Array1DElephino = state.gapRangeStart
+ gapsWhere: Array1DLeavesTotal = state.gapsWhere
+ leafAbove: Array1DLeavesTotal = state.leafAbove
+ leafBelow: Array1DLeavesTotal = state.leafBelow
+ connectionGraph: Array3D = state.connectionGraph
+ dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
+ leavesTotal: DatatypeLeavesTotal = state.leavesTotal
+ groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal = count(groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
+ state = MapFoldingState(mapShape=mapShape, groupsOfFolds=groupsOfFolds, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, dimensionsUnconstrained=dimensionsUnconstrained, countDimensionsGapped=countDimensionsGapped, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow)
+ return state