mapFolding 0.12.2__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. mapFolding/__init__.py +4 -2
  2. mapFolding/_theSSOT.py +32 -88
  3. mapFolding/{datatypes.py → _theTypes.py} +25 -3
  4. mapFolding/basecamp.py +38 -33
  5. mapFolding/beDRY.py +79 -54
  6. mapFolding/dataBaskets.py +123 -93
  7. mapFolding/filesystemToolkit.py +140 -91
  8. mapFolding/oeis.py +243 -145
  9. mapFolding/reference/flattened.py +1 -1
  10. mapFolding/someAssemblyRequired/RecipeJob.py +116 -100
  11. mapFolding/someAssemblyRequired/__init__.py +40 -15
  12. mapFolding/someAssemblyRequired/_toolIfThis.py +82 -54
  13. mapFolding/someAssemblyRequired/_toolkitContainers.py +19 -16
  14. mapFolding/someAssemblyRequired/getLLVMforNoReason.py +35 -26
  15. mapFolding/someAssemblyRequired/makeAllModules.py +353 -283
  16. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +83 -84
  17. mapFolding/someAssemblyRequired/makeJobTheorem2codon.py +256 -0
  18. mapFolding/someAssemblyRequired/toolkitNumba.py +80 -50
  19. mapFolding/someAssemblyRequired/transformationTools.py +63 -40
  20. {tests → mapFolding/tests}/__init__.py +2 -2
  21. {tests → mapFolding/tests}/conftest.py +232 -63
  22. {tests → mapFolding/tests}/test_computations.py +58 -18
  23. {tests → mapFolding/tests}/test_filesystem.py +10 -13
  24. {tests → mapFolding/tests}/test_oeis.py +5 -18
  25. {tests → mapFolding/tests}/test_other.py +9 -9
  26. {tests → mapFolding/tests}/test_tasks.py +7 -9
  27. {mapfolding-0.12.2.dist-info → mapfolding-0.13.0.dist-info}/METADATA +24 -37
  28. mapfolding-0.13.0.dist-info/RECORD +54 -0
  29. {mapfolding-0.12.2.dist-info → mapfolding-0.13.0.dist-info}/top_level.txt +0 -1
  30. mapfolding-0.12.2.dist-info/RECORD +0 -53
  31. {mapfolding-0.12.2.dist-info → mapfolding-0.13.0.dist-info}/WHEEL +0 -0
  32. {mapfolding-0.12.2.dist-info → mapfolding-0.13.0.dist-info}/entry_points.txt +0 -0
  33. {mapfolding-0.12.2.dist-info → mapfolding-0.13.0.dist-info}/licenses/LICENSE +0 -0
@@ -380,7 +380,7 @@ def parseDimensions(dimensions: Sequence[int], parameterName: str = 'unnamed par
  def setCPUlimit(CPUlimit: bool | float | int | None) -> int:
  # if not (CPUlimit is None or isinstance(CPUlimit, (bool, int, float))):
  # CPUlimit = oopsieKwargsie(CPUlimit)
- # concurrencyLimit = defineConcurrencyLimit(CPUlimit)
+ # concurrencyLimit = defineConcurrencyLimit(limit=CPUlimit)
  # numba.set_num_threads(concurrencyLimit)
  concurrencyLimitHARDCODED = 1
  concurrencyLimit = concurrencyLimitHARDCODED
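In the hunk above, the thread-limit plumbing stays commented out and `concurrencyLimit` is pinned to 1. For orientation only, here is a rough, stdlib-only sketch of how a `CPUlimit`-style argument is commonly normalized into a thread count; this is an assumption about intent, not the package's actual `defineConcurrencyLimit`, whose semantics live in its own dependency.

    import multiprocessing

    def normalizeConcurrencyLimit(CPUlimit: bool | float | int | None) -> int:
        # Illustrative normalization only; the real defineConcurrencyLimit may differ.
        cpuTotal = multiprocessing.cpu_count()
        if CPUlimit is None or CPUlimit is False:
            return cpuTotal                              # no limit requested: use every core
        if CPUlimit is True:
            return 1                                     # strictest interpretation: single thread
        if isinstance(CPUlimit, float) and 0 < CPUlimit < 1:
            return max(1, int(cpuTotal * CPUlimit))      # fraction: that share of available cores
        return max(1, min(cpuTotal, int(CPUlimit)))      # explicit count, clamped to the machine

    print(normalizeConcurrencyLimit(0.5))  # e.g. 4 on an 8-core machine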
@@ -1,39 +1,11 @@
- """
- Map folding AST transformation system: Configuration management and transformation orchestration.
-
- This module provides the configuration orchestration layer of the map folding AST transformation
- system, implementing comprehensive recipes that coordinate the entire transformation process from
- abstract mathematical algorithms to optimized computational modules. The `RecipeJobTheorem2Numba`
- dataclass serves as the central configuration blueprint that bridges pattern recognition, dataclass
- decomposition, function optimization, and Numba compilation into a unified transformation process.
-
- The recipe system addresses the complexity of managing transformation parameters across multiple
- stages while maintaining consistency between source algorithm metadata and target optimization
- requirements. The orchestration layer coordinates the systematic extraction of mathematical
- functions from source modules, embedding of concrete parameter values, elimination of dead code
- paths, and generation of standalone Python modules optimized for specific map dimensions through
- the complete transformation process.
-
- Configuration management separates source analysis capabilities from target generation parameters,
- enabling systematic exploration of computational spaces through automated generation of optimized
- solvers. Source analysis encompasses parsing and analysis of abstract syntax trees from generic
- algorithm modules, extraction of specific mathematical functions for specialization, and
- identification of dataclass structures for parameter embedding. Target generation coordinates
- creation of standalone Python modules with optimized implementations, integration of Numba
- optimization directives, and preservation of mathematical correctness throughout optimization.
-
- The recipe system enables the broader map folding research framework by providing systematic
- control over the transformation process while ensuring that generated modules achieve maximum
- performance through compile-time specialization and runtime optimization strategies.
- """
+ """Configuration by dataclass."""

  from ast import Module
  from astToolkit import identifierDotAttribute, parseLogicalPath2astModule
  from mapFolding import (
  DatatypeElephino as TheDatatypeElephino, DatatypeFoldsTotal as TheDatatypeFoldsTotal,
- DatatypeLeavesTotal as TheDatatypeLeavesTotal, getPathFilenameFoldsTotal, getPathRootJobDEFAULT,
- MapFoldingState, packageSettings,
- )
+ DatatypeLeavesTotal as TheDatatypeLeavesTotal, getPathFilenameFoldsTotal, getPathRootJobDEFAULT, MapFoldingState,
+ packageSettings)
  from mapFolding.someAssemblyRequired import dataclassInstanceIdentifierDEFAULT, ShatteredDataclass
  from mapFolding.someAssemblyRequired.transformationTools import shatter_dataclassesDOTdataclass
  from pathlib import Path, PurePosixPath
@@ -41,113 +13,153 @@ from typing import TypeAlias
  import dataclasses

  @dataclasses.dataclass
- class RecipeJobTheorem2Numba:
- """Configuration recipe for generating Numba-optimized map folding computation jobs.
+ class RecipeJobTheorem2:
+ """Configuration recipe for generating map folding computation jobs.

  This dataclass serves as the central configuration hub for the code transformation
- pipeline that converts generic map folding algorithms into highly optimized,
+ assembly line that converts generic map folding algorithms into highly optimized,
  specialized computation modules. The recipe encapsulates all parameters required
  for source code analysis, target file generation, datatype mapping, and compilation
  optimization settings.

  The transformation process operates by extracting functions from source modules,
  embedding concrete parameter values, eliminating dead code paths, and generating
- standalone Python modules optimized for specific map dimensions. These generated
- modules achieve maximum performance through Numba just-in-time compilation and
- embedded compile-time constants.
+ standalone Python modules optimized for specific map dimensions.

  The recipe maintains both source configuration (where to find the generic algorithm)
  and target configuration (where to write the optimized module), along with the
  computational state that provides concrete values for the transformation process.

- Attributes:
- state: The map folding computation state containing dimensions and initial values.
- foldsTotalEstimated: Estimated total number of folds for progress tracking (0).
- shatteredDataclass: Deconstructed dataclass metadata for code transformation.
- source_astModule: Parsed AST of the source module containing the generic algorithm.
- sourceCountCallable: Name of the counting function to extract ('count').
- sourceLogicalPathModuleDataclass: Logical path to the dataclass module.
- sourceDataclassIdentifier: Name of the source dataclass ('MapFoldingState').
- sourceDataclassInstance: Instance identifier for the dataclass.
- sourcePathPackage: Path to the source package.
- sourcePackageIdentifier: Name of the source package.
- pathPackage: Override path for the target package (None).
- pathModule: Override path for the target module directory.
- fileExtension: File extension for generated modules.
- pathFilenameFoldsTotal: Path for writing fold count results.
- packageIdentifier: Target package identifier (None).
- logicalPathRoot: Logical path root corresponding to filesystem directory.
- moduleIdentifier: Target module identifier.
- countCallable: Name of the counting function in generated module.
- dataclassIdentifier: Target dataclass identifier.
- dataclassInstance: Target dataclass instance identifier.
- logicalPathModuleDataclass: Logical path to target dataclass module.
- DatatypeFoldsTotal: Type alias for fold count datatype.
- DatatypeElephino: Type alias for intermediate computation datatype.
- DatatypeLeavesTotal: Type alias for leaf count datatype.
+ Attributes
+ ----------
+ state : MapFoldingState
+ The map folding computation state containing dimensions and initial values.
+ foldsTotalEstimated : int = 0
+ Estimated total number of folds for progress tracking.
+ shatteredDataclass : ShatteredDataclass = None
+ Deconstructed dataclass metadata for code transformation.
+ source_astModule : Module
+ Parsed AST of the source module containing the generic algorithm.
+ sourceCountCallable : str = 'count'
+ Name of the counting function to extract.
+ sourceLogicalPathModuleDataclass : identifierDotAttribute
+ Logical path to the dataclass module.
+ sourceDataclassIdentifier : str = 'MapFoldingState'
+ Name of the source dataclass.
+ sourceDataclassInstance : str
+ Instance identifier for the dataclass.
+ sourcePathPackage : PurePosixPath | None
+ Path to the source package.
+ sourcePackageIdentifier : str | None
+ Name of the source package.
+ pathPackage : PurePosixPath | None = None
+ Override path for the target package.
+ pathModule : PurePosixPath | None
+ Override path for the target module directory.
+ fileExtension : str
+ File extension for generated modules.
+ pathFilenameFoldsTotal : PurePosixPath = None
+ Path for writing fold count results.
+ packageIdentifier : str | None = None
+ Target package identifier.
+ logicalPathRoot : identifierDotAttribute | None = None
+ Logical path root; probably corresponds to physical filesystem directory.
+ moduleIdentifier : str = None
+ Target module identifier.
+ countCallable : str
+ Name of the counting function in generated module.
+ dataclassIdentifier : str | None
+ Target dataclass identifier.
+ dataclassInstance : str | None
+ Target dataclass instance identifier.
+ logicalPathModuleDataclass : identifierDotAttribute | None
+ Logical path to target dataclass module.
+ DatatypeFoldsTotal : TypeAlias
+ Type alias for fold count datatype.
+ DatatypeElephino : TypeAlias
+ Type alias for intermediate computation datatype.
+ DatatypeLeavesTotal : TypeAlias
+ Type alias for leaf count datatype.
  """
+
  state: MapFoldingState
- # TODO create function to calculate `foldsTotalEstimated`
+ """The map folding computation state containing dimensions and initial values."""
  foldsTotalEstimated: int = 0
+ """Estimated total number of folds for progress tracking."""
  shatteredDataclass: ShatteredDataclass = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
+ """Deconstructed dataclass metadata for code transformation."""

- # ========================================
- # Source
- source_astModule: Module = parseLogicalPath2astModule('mapFolding.syntheticModules.theorem2Numba')
+ # Source -----------------------------------------
+ source_astModule: Module = parseLogicalPath2astModule('mapFolding.syntheticModules.theorem2Numba') # noqa: RUF009
+ """Parsed AST of the source module containing the generic algorithm."""
  sourceCountCallable: str = 'count'
+ """Name of the counting function to extract."""

  sourceLogicalPathModuleDataclass: identifierDotAttribute = 'mapFolding.dataBaskets'
+ """Logical path to the dataclass module."""
  sourceDataclassIdentifier: str = 'MapFoldingState'
+ """Name of the source dataclass."""
  sourceDataclassInstance: str = dataclassInstanceIdentifierDEFAULT
+ """Instance identifier for the dataclass."""

- sourcePathPackage: PurePosixPath | None = PurePosixPath(packageSettings.pathPackage)
- sourcePackageIdentifier: str | None = packageSettings.packageName
+ sourcePathPackage: PurePosixPath | None = PurePosixPath(packageSettings.pathPackage) # noqa: RUF009
+ """Path to the source package."""
+ sourcePackageIdentifier: str | None = packageSettings.identifierPackage
+ """Name of the source package."""

- # ========================================
- # Filesystem (names of physical objects)
+ # Filesystem, names of physical objects ------------------------------------------
  pathPackage: PurePosixPath | None = None
- pathModule: PurePosixPath | None = PurePosixPath(getPathRootJobDEFAULT())
- """ `pathModule` will override `pathPackage` and `logicalPathRoot`."""
+ """Override path for the target package."""
+ pathModule: PurePosixPath | None = PurePosixPath(getPathRootJobDEFAULT()) # noqa: RUF009
+ """Override path for the target module directory."""
  fileExtension: str = packageSettings.fileExtension
+ """File extension for generated modules."""
  pathFilenameFoldsTotal: PurePosixPath = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
+ """Path for writing fold count results."""

- # ========================================
- # Logical identifiers (as opposed to physical identifiers)
+ # Logical identifiers, as opposed to physical identifiers ------------------------
  packageIdentifier: str | None = None
+ """Target package identifier."""
  logicalPathRoot: identifierDotAttribute | None = None
- """ `logicalPathRoot` likely corresponds to a physical filesystem directory."""
+ """Logical path root; probably corresponds to physical filesystem directory."""
  moduleIdentifier: str = dataclasses.field(default=None, init=True) # pyright: ignore[reportAssignmentType]
+ """Target module identifier."""
  countCallable: str = sourceCountCallable
+ """Name of the counting function in generated module."""
  dataclassIdentifier: str | None = sourceDataclassIdentifier
+ """Target dataclass identifier."""
  dataclassInstance: str | None = sourceDataclassInstance
+ """Target dataclass instance identifier."""
  logicalPathModuleDataclass: identifierDotAttribute | None = sourceLogicalPathModuleDataclass
+ """Logical path to target dataclass module."""

- # ========================================
- # Datatypes
+ # Datatypes ------------------------------------------
  DatatypeFoldsTotal: TypeAlias = TheDatatypeFoldsTotal
+ """Type alias for datatype linked to the magnitude of `foldsTotal`."""
  DatatypeElephino: TypeAlias = TheDatatypeElephino
+ """Type alias for intermediate computation datatype."""
  DatatypeLeavesTotal: TypeAlias = TheDatatypeLeavesTotal
+ """Type alias for datatype linked to the magnitude of `leavesTotal`."""

- def _makePathFilename(self,
- pathRoot: PurePosixPath | None = None,
- logicalPathINFIX: identifierDotAttribute | None = None,
- filenameStem: str | None = None,
- fileExtension: str | None = None,
- ) -> PurePosixPath:
+ def _makePathFilename(self, pathRoot: PurePosixPath | None = None, logicalPathINFIX: identifierDotAttribute | None = None, filenameStem: str | None = None, fileExtension: str | None = None) -> PurePosixPath:
  """Construct a complete file path from component parts.

- This helper method builds filesystem paths by combining a root directory,
- optional subdirectory structure, filename stem, and file extension. It provides
- sensible defaults for missing components based on the recipe configuration.
-
- Parameters:
- pathRoot: Base directory path. Defaults to package path or current directory.
- logicalPathINFIX: Dot-separated path segments to insert between root and filename.
- filenameStem: Base filename without extension. Defaults to module identifier.
- fileExtension: File extension including dot. Defaults to configured extension.
+ Parameters
+ ----------
+ pathRoot : PurePosixPath | None = None
+ Base directory path. Defaults to package path or current directory.
+ logicalPathINFIX : identifierDotAttribute | None = None
+ Dot-separated path segments to insert between root and filename.
+ filenameStem : str | None = None
+ Base filename without extension. Defaults to module identifier.
+ fileExtension : str | None = None
+ File extension including dot. Defaults to configured extension.
+
+ Returns
+ -------
+ pathFilename : PurePosixPath
+ Complete file path as a `PurePosixPath` object.

- Returns:
- Complete file path as a PurePosixPath object.
  """
  if pathRoot is None:
  pathRoot = self.pathPackage or PurePosixPath(Path.cwd())
@@ -166,11 +178,14 @@ class RecipeJobTheorem2Numba:
  """Generate the complete path and filename for the output module.

  This property computes the target location where the generated computation
- module will be written. It respects the pathModule override if specified,
+ module will be written. It respects the `pathModule` override if specified,
  otherwise constructs the path using the default package structure.

- Returns:
+ Returns
+ -------
+ pathFilename : PurePosixPath
  Complete path to the target module file.
+
  """
  if self.pathModule is None:
  return self._makePathFilename()
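The `_makePathFilename` helper documented above assembles the target path from a root directory, an optional dotted infix, a filename stem, and an extension. A minimal stdlib sketch of that assembly, not the package's code, assuming the dotted infix maps to nested directories:

    from pathlib import PurePosixPath

    def makePathFilename(pathRoot: str, logicalPathINFIX: str | None, filenameStem: str, fileExtension: str = '.py') -> PurePosixPath:
        # Start from the root directory.
        pathFilename = PurePosixPath(pathRoot)
        # A dotted logical path such as 'mapFolding.jobs' becomes nested directories mapFolding/jobs.
        if logicalPathINFIX:
            pathFilename = pathFilename.joinpath(*logicalPathINFIX.split('.'))
        # Append stem plus extension as the final component.
        return pathFilename / (filenameStem + fileExtension)

    print(makePathFilename('/tmp/jobs', 'mapFolding.jobs', 'p2x4'))  # /tmp/jobs/mapFolding/jobs/p2x4.py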
@@ -180,13 +195,14 @@ class RecipeJobTheorem2Numba:
  def __post_init__(self) -> None:
  """Initialize computed fields and validate configuration after dataclass creation.

- This method performs post-initialization setup including:
- 1. Deriving module identifier from map shape if not explicitly provided
- 2. Setting default paths for fold total output files
- 3. Creating shattered dataclass metadata for code transformations
+ This method performs post-initialization setup including deriving module
+ identifier from map shape if not explicitly provided, setting default paths
+ for fold total output files, and creating shattered dataclass metadata for
+ code transformations.

  The initialization ensures all computed fields are properly set based on
  the provided configuration and sensible defaults.
+
  """
  pathFilenameFoldsTotal = PurePosixPath(getPathFilenameFoldsTotal(self.state.mapShape))

@@ -1,26 +1,40 @@
  """
  Map folding AST transformation system: Comprehensive framework for converting dataclass-based algorithms to optimized implementations.

- This subpackage implements a sophisticated Abstract Syntax Tree (AST) transformation system specifically designed to convert high-level dataclass-based map folding algorithms into highly optimized, Numba-compatible implementations. The transformation system addresses a fundamental challenge in high-performance scientific computing: bridging the gap between maintainable, object-oriented algorithm implementations and the performance requirements of computationally intensive mathematical research.
+ This subpackage implements a sophisticated Abstract Syntax Tree (AST) transformation system specifically designed to convert
+ high-level dataclass-based map folding algorithms into highly optimized, Numba-compatible implementations. The transformation
+ system addresses a fundamental challenge in high-performance scientific computing: bridging the gap between maintainable,
+ object-oriented algorithm implementations and the performance requirements of computationally intensive mathematical research.

- The map folding problem domain involves complex combinatorial calculations that can require hours or days to complete for specific dimensional configurations. While dataclass-based implementations provide clean, maintainable interfaces for managing complex mathematical state, these objects cannot be directly processed by Numba's just-in-time compiler, which excels at optimizing operations on primitive values and tuples. This subpackage resolves this architectural tension through systematic AST manipulation that preserves algorithmic correctness while enabling dramatic performance improvements.
+ The map folding problem domain involves complex combinatorial calculations that can require hours or days to complete for specific
+ dimensional configurations. While dataclass-based implementations provide clean, maintainable interfaces for managing complex
+ mathematical state, these objects cannot be directly processed by Numba's just-in-time compiler, which excels at optimizing
+ operations on primitive values and tuples. This subpackage resolves this architectural tension through systematic AST manipulation
+ that preserves algorithmic correctness while enabling dramatic performance improvements.

  ## System Architecture

- The transformation system operates through a carefully orchestrated sequence of specialized modules, each contributing essential capabilities to the complete transformation process:
+ The transformation system operates through a carefully orchestrated sequence of specialized modules, each contributing essential
+ capabilities to the complete transformation process:

  ### Foundation Layer: Pattern Recognition and Structural Analysis
- `_toolIfThis`: Extended predicate functions for identifying specific code patterns in AST nodes, particularly conditional expressions and control flow structures essential to map folding computations
- `_toolkitContainers`: Dataclass decomposition containers that extract individual fields, type annotations, and reconstruction logic from dataclass definitions into manipulatable AST components
+ - `_toolIfThis`: Extended predicate functions for identifying specific code patterns in AST nodes, particularly conditional
+ expressions and control flow structures essential to map folding computations
+ - `_toolkitContainers`: Dataclass decomposition containers that extract individual fields, type annotations, and reconstruction
+ logic from dataclass definitions into manipulatable AST components

  ### Operational Core: Transformation Implementation
- `transformationTools`: Core functions executing dataclass decomposition, function signature transformation, and calling convention adaptation that convert dataclass-accepting functions into primitive-parameter equivalents
- `toolkitNumba`: Numba integration tools providing just-in-time compilation optimization with configurable performance parameters and strategic compiler directive application
+ - `transformationTools`: Core functions executing dataclass decomposition, function signature transformation, and calling
+ convention adaptation that convert dataclass-accepting functions into primitive-parameter equivalents
+ - `toolkitNumba`: Numba integration tools providing just-in-time compilation optimization with configurable performance parameters
+ and strategic compiler directive application

  ### Configuration and Orchestration
  - `infoBooth`: Configuration constants, computational complexity estimates, and default identifiers for systematic module generation and optimization decision-making
- `RecipeJob`: Configuration management dataclasses that coordinate transformation parameters across multiple stages while maintaining consistency between source algorithms and target optimizations
- `makeAllModules`: Comprehensive transformation orchestration tools that execute complete transformation processes for diverse computational strategies and performance characteristics
+ - `RecipeJob`: Configuration management dataclasses that coordinate transformation parameters across multiple stages while
+ maintaining consistency between source algorithms and target optimizations
+ - `makeAllModules`: Comprehensive transformation orchestration tools that execute complete transformation processes for diverse
+ computational strategies and performance characteristics
  - `makeJobTheorem2Numba`: Specialized job generation implementing the complete transformation sequence to produce standalone, highly optimized computation modules

  ### Utility Extensions
@@ -30,21 +44,32 @@ The transformation system operates through a carefully orchestrated sequence of
  The complete transformation follows a systematic three-stage pattern:

- 1. **Analysis and Decomposition**: Pattern recognition identifies dataclass structures and dependencies, followed by decomposition into constituent AST components including field definitions, type annotations, and initialization patterns.
+ 1. **Analysis and Decomposition**: Pattern recognition identifies dataclass structures and dependencies, followed by decomposition
+ into constituent AST components including field definitions, type annotations, and initialization patterns.

- 2. **Function Optimization**: Core transformations convert functions accepting dataclass parameters into functions accepting individual primitive parameters, with systematic updates to signatures, return types, and calling conventions.
+ 2. **Function Optimization**: Core transformations convert functions accepting dataclass parameters into functions accepting
+ individual primitive parameters, with systematic updates to signatures, return types, and calling conventions.

- 3. **Compilation Integration**: Numba decorators with carefully configured optimization parameters are applied to transformed functions, enabling aggressive just-in-time compilation with performance characteristics suitable for large-scale computational research.
+ 3. **Compilation Integration**: Numba decorators with carefully configured optimization parameters are applied to transformed
+ functions, enabling aggressive just-in-time compilation with performance characteristics suitable for large-scale computational
+ research.

  ## Generated Module Characteristics

- The transformation system produces standalone Python modules with embedded constants replacing parameterized values, eliminated dead code paths, optimized data structures, Numba compilation directives, progress feedback for long-running calculations, and consistent naming conventions with systematic filesystem organization. These modules maintain mathematical correctness while providing significant performance improvements essential to map folding research computational demands.
+ The transformation system produces standalone Python modules with embedded constants replacing parameterized values, eliminated
+ dead code paths, optimized data structures, Numba compilation directives, progress feedback for long-running calculations, and
+ consistent naming conventions with systematic filesystem organization. These modules maintain mathematical correctness while
+ providing significant performance improvements essential to map folding research computational demands.

  ## Usage Guidance

- Begin exploration with `infoBooth` for understanding configuration options and complexity estimates. Proceed to `transformationTools` for core transformation capabilities, then examine `RecipeJob` for orchestration patterns. Advanced users developing custom transformations should study `_toolIfThis` and `_toolkitContainers` for foundational pattern recognition and structural manipulation capabilities.
+ Begin exploration with `infoBooth` for understanding configuration options and complexity estimates. Proceed to
+ `transformationTools` for core transformation capabilities, then examine `RecipeJob` for orchestration patterns. Advanced users
+ developing custom transformations should study `_toolIfThis` and `_toolkitContainers` for foundational pattern recognition and
+ structural manipulation capabilities.

- The transformation system represents the culmination of systematic AST manipulation research, enabling previously intractable calculations through the strategic application of compiler optimization techniques to abstract mathematical algorithms.
+ The transformation system represents the culmination of systematic AST manipulation research, enabling previously intractable
+ calculations through the strategic application of compiler optimization techniques to abstract mathematical algorithms.
  """

  from mapFolding.someAssemblyRequired.infoBooth import (
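The three-stage pattern described in this docstring hinges on rewriting dataclass-accepting functions into primitive-parameter equivalents that Numba can compile. A hand-written illustration of that rewrite, using stand-in names rather than the package's real state fields or its AST machinery:

    from dataclasses import dataclass

    @dataclass
    class CounterState:  # stand-in for a state dataclass such as MapFoldingState
        groupsOfFolds: int
        leaf1ndex: int

    # Before: the maintainable form, which Numba's nopython mode cannot compile directly.
    def countWithDataclass(state: CounterState) -> CounterState:
        while state.leaf1ndex > 0:
            state.groupsOfFolds += 1
            state.leaf1ndex -= 1
        return state

    # After: the same logic on primitive parameters, the shape the transformation produces.
    def countWithPrimitives(groupsOfFolds: int, leaf1ndex: int) -> tuple[int, int]:
        while leaf1ndex > 0:
            groupsOfFolds += 1
            leaf1ndex -= 1
        return groupsOfFolds, leaf1ndex

    assert countWithDataclass(CounterState(0, 5)).groupsOfFolds == countWithPrimitives(0, 5)[0]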
@@ -24,97 +24,125 @@ Classes:
  algorithm transformations.
  """

- from astToolkit import Be, DOT, IfThis as astToolkit_IfThis
+ from astToolkit import Be, IfThis as astToolkit_IfThis
  from collections.abc import Callable
- from typing import TypeGuard
+ from typing_extensions import TypeIs
  import ast

  class IfThis(astToolkit_IfThis):
- """
- Provide predicate functions for matching and filtering AST nodes based on various criteria.
+ """Provide predicate functions for matching and filtering AST nodes based on various criteria.
+
+ (AI generated docstring)

- The IfThis class contains static methods that generate predicate functions used to test whether AST nodes match
- specific criteria. These predicates can be used with NodeChanger and NodeTourist to identify and process specific
+ The `IfThis` `class` contains static methods that generate predicate functions used to test whether AST nodes match
+ specific criteria. These predicates can be used with `NodeChanger` and `NodeTourist` to identify and process specific
  patterns in the AST.

- The class provides predicates for matching various node types, attributes, identifiers, and structural patterns,
+ The `class` provides predicates for matching various node types, attributes, identifiers, and structural patterns,
  enabling precise targeting of AST elements for analysis or transformation.
+
  """

  @staticmethod
- def isAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
+ def isAttributeNamespaceIdentifierLessThanOrEqual0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.Compare]]:
+ """Generate a predicate that matches comparison expressions testing if a namespaced attribute is less than or equal to 0.
+
+ (AI generated docstring)
+
+ This function creates a predicate that identifies AST nodes representing comparisons
+ of the form `namespace.identifier <= 0`. It's used to identify conditional
+ expressions that test non-positive values of counting variables or similar constructs.
+
+ Parameters
+ ----------
+ namespace : str
+ The namespace or object name containing the attribute.
+ identifier : str
+ The attribute name to test.
+
+ Returns
+ -------
+ predicate : Callable[[ast.AST], TypeIs[ast.Compare]]
+ A predicate function that returns `True` for `Compare` nodes matching the pattern.
+
  """
- Generate a predicate that matches comparison expressions testing if a namespaced attribute is greater than 0.
+ return lambda node: (Be.Compare.leftIs(IfThis.isAttributeNamespaceIdentifier(namespace, identifier))(node)
+ and Be.Compare.opsIs(lambda at: Be.LtE(at[0]))(node)
+ )
+
+ @staticmethod
+ def isAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.Compare] | bool]:
+ """Generate a predicate that matches comparison expressions testing if a namespaced attribute is greater than 0.
+
+ (AI generated docstring)

  This function creates a predicate that identifies AST nodes representing comparisons
  of the form `namespace.identifier > 0`. It's commonly used to identify conditional
  expressions that test positive values of counting variables or similar constructs.

- Parameters:
- namespace: The namespace or object name containing the attribute
- identifier: The attribute name to test
+ Parameters
+ ----------
+ namespace : str
+ The namespace or object name containing the attribute.
+ identifier : str
+ The attribute name to test.
+
+ Returns
+ -------
+ predicate : Callable[[ast.AST], TypeIs[ast.Compare]]
+ A predicate function that returns `True` for `Compare` nodes matching the pattern.

- Returns:
- A predicate function that returns True for Compare nodes matching the pattern
  """
- return lambda node: (Be.Compare(node)
- and IfThis.isAttributeNamespaceIdentifier(namespace, identifier)(DOT.left(node))
+ return lambda node: (Be.Compare.leftIs(IfThis.isAttributeNamespaceIdentifier(namespace, identifier))(node)
  and Be.Gt(node.ops[0])
  and IfThis.isConstant_value(0)(node.comparators[0]))

  @staticmethod
- def isIfAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.If] | bool]:
- """
- Generate a predicate that matches If statements testing if a namespaced attribute is greater than 0.
+ def isIfAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.If]]:
+ """Generate a predicate that matches If statements testing if a namespaced attribute is greater than 0.
+
+ (AI generated docstring)

  This function creates a predicate that identifies AST nodes representing conditional
  statements of the form `if namespace.identifier > 0:`. It's used to find control
  flow structures that depend on positive values of specific attributes.

- Parameters:
- namespace: The namespace or object name containing the attribute
- identifier: The attribute name to test
+ Parameters
+ ----------
+ namespace : str
+ The namespace or object name containing the attribute.
+ identifier : str
+ The attribute name to test.
+
+ Returns
+ -------
+ predicate : Callable[[ast.AST], TypeIs[ast.If]]
+ A predicate function that returns `True` for `If` nodes with the specified test condition.

- Returns:
- A predicate function that returns True for If nodes with the specified test condition
  """
- return lambda node: (Be.If(node)
- and IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier)(DOT.test(node)))
+ return Be.If.testIs(IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier))

  @staticmethod
- def isWhileAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.While] | bool]:
- """
- Generate a predicate that matches While loops testing if a namespaced attribute is greater than 0.
+ def isWhileAttributeNamespaceIdentifierGreaterThan0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeIs[ast.While]]:
+ """Generate a predicate that matches While loops testing if a namespaced attribute is greater than 0.
+
+ (AI generated docstring)

  This function creates a predicate that identifies AST nodes representing loop
  statements of the form `while namespace.identifier > 0:`. It's used to find
  iteration constructs that continue while specific attributes remain positive.

- Parameters:
- namespace: The namespace or object name containing the attribute
- identifier: The attribute name to test
-
- Returns:
- A predicate function that returns True for While nodes with the specified test condition
- """
- return lambda node: (Be.While(node)
- and IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier)(DOT.test(node)))
- @staticmethod
- def isAttributeNamespaceIdentifierLessThanOrEqual0(namespace: str, identifier: str) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
- """
- Generate a predicate that matches comparison expressions testing if a namespaced attribute is less than or equal to 0.
-
- This function creates a predicate that identifies AST nodes representing comparisons
- of the form `namespace.identifier <= 0`. It's used to identify conditional
- expressions that test non-positive values of counting variables or similar constructs.
+ Parameters
+ ----------
+ namespace : str
+ The namespace or object name containing the attribute.
+ identifier : str
+ The attribute name to test.

- Parameters:
- namespace: The namespace or object name containing the attribute
- identifier: The attribute name to test
+ Returns
+ -------
+ predicate : Callable[[ast.AST], TypeIs[ast.While]]
+ A predicate function that returns `True` for `While` nodes with the specified test condition.

- Returns:
- A predicate function that returns True for Compare nodes matching the pattern
  """
- return lambda node: (Be.Compare(node)
- and IfThis.isAttributeNamespaceIdentifier(namespace, identifier)(DOT.left(node))
- and Be.LtE(node.ops[0]))
+ return Be.While.testIs(IfThis.isAttributeNamespaceIdentifierGreaterThan0(namespace, identifier))
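The predicates in this file lean on astToolkit's `Be` and `IfThis` combinators. As a conceptual reference only, the following stdlib sketch reimplements by hand what a predicate such as `isWhileAttributeNamespaceIdentifierGreaterThan0('state', 'leaf1ndex')` is meant to match; it does not use astToolkit, and the field name is illustrative:

    import ast

    def isWhileAttributeGreaterThan0(namespace: str, identifier: str):
        # Returns a predicate matching: while <namespace>.<identifier> > 0:
        def predicate(node: ast.AST) -> bool:
            return (isinstance(node, ast.While)
                and isinstance(node.test, ast.Compare)
                and isinstance(node.test.left, ast.Attribute)
                and isinstance(node.test.left.value, ast.Name)
                and node.test.left.value.id == namespace
                and node.test.left.attr == identifier
                and isinstance(node.test.ops[0], ast.Gt)
                and isinstance(node.test.comparators[0], ast.Constant)
                and node.test.comparators[0].value == 0)
        return predicate

    tree = ast.parse("while state.leaf1ndex > 0:\n    state.leaf1ndex -= 1\n")
    assert any(isWhileAttributeGreaterThan0('state', 'leaf1ndex')(node) for node in ast.walk(tree))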